diff --git a/.editorconfig b/.editorconfig index c35a00240..a1f2b6ae8 100644 --- a/.editorconfig +++ b/.editorconfig @@ -17,3 +17,6 @@ insert_final_newline = false [*.{diff,md}] trim_trailing_whitespace = false + +[*.py] +indent_size = 4 diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 7d5037dbc..81341246c 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -8,7 +8,7 @@ updates: time: "12:00" timezone: "Europe/Zurich" - package-ecosystem: npm - directory: "/" + directory: "/frontend" schedule: interval: "weekly" day: "friday" @@ -16,3 +16,23 @@ updates: timezone: "Europe/Zurich" open-pull-requests-limit: 10 versioning-strategy: increase + - package-ecosystem: pip + directory: "/backend" + schedule: + interval: weekly + day: friday + time: "12:00" + timezone: "Europe/Zurich" + commit-message: + prefix: chore + include: scope + - package-ecosystem: docker + directory: "/" + schedule: + interval: weekly + day: friday + time: "12:00" + timezone: "Europe/Zurich" + commit-message: + prefix: chore + include: scope diff --git a/.github/workflows/release-image.yml b/.github/workflows/release-image.yml index 1af7d355e..aacc33302 100644 --- a/.github/workflows/release-image.yml +++ b/.github/workflows/release-image.yml @@ -1,4 +1,4 @@ -name: Release ghcr image +name: Release ghcr images on: release: @@ -7,6 +7,9 @@ on: jobs: container: runs-on: ubuntu-latest + strategy: + matrix: + target: [frontend, backend] steps: - name: Checkout uses: actions/checkout@v4 @@ -15,11 +18,11 @@ jobs: id: meta uses: docker/metadata-action@v5 with: - images: ghcr.io/adfinis/timed-frontend + images: ghcr.io/adfinis/timed-${{ matrix.target }} flavor: | latest=auto labels: | - org.opencontainers.image.title=${{ github.event.repository.name }} + org.opencontainers.image.title=${{ github.event.repository.name }}-${{ matrix.target }} org.opencontainers.image.description=${{ github.event.repository.description }} org.opencontainers.image.url=${{ 
github.event.repository.html_url }} org.opencontainers.image.source=${{ github.event.repository.clone_url }} @@ -38,7 +41,7 @@ jobs: uses: docker/build-push-action@v5 with: context: . - file: ./Dockerfile + file: ./${{ matrix.target }}/Dockerfile push: ${{ github.event_name != 'pull_request' }} tags: ${{ steps.meta.outputs.tags }} labels: | diff --git a/.github/workflows/release-npm.yml b/.github/workflows/release-npm.yml index 2a9bdf9b0..e0ba93238 100644 --- a/.github/workflows/release-npm.yml +++ b/.github/workflows/release-npm.yml @@ -2,6 +2,9 @@ name: Release npm package on: workflow_dispatch +env: + frontend-dir: ./frontend + jobs: release: name: Release @@ -24,9 +27,11 @@ jobs: - name: Install dependencies run: pnpm install + working-directory: ${{ env.frontend-dir }} - name: Release on NPM run: pnpm semantic-release + working-directory: ${{ env.frontend-dir }} env: GH_TOKEN: ${{ secrets.GH_TOKEN }} NPM_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e715ef5b2..aaeae566f 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -13,9 +13,11 @@ on: env: NODE_VERSION: 18 + frontend-dir: ./frontend + backend-dir: ./backend jobs: - lint: + lint-frontend: name: Lint runs-on: [ubuntu-latest] timeout-minutes: 5 @@ -39,13 +41,15 @@ jobs: - name: Install dependencies run: pnpm install --no-frozen-lockfile + working-directory: ${{ env.frontend-dir }} - name: Lint ${{ matrix.target }} run: pnpm lint:${{ matrix.target }} + working-directory: ${{ env.frontend-dir }} - test: + test-frontend: name: Tests - needs: [lint] + needs: [lint-frontend] runs-on: [ubuntu-latest] timeout-minutes: 10 @@ -64,9 +68,11 @@ jobs: - name: Install dependencies run: pnpm install --no-frozen-lockfile + working-directory: ${{ env.frontend-dir }} - name: Run tests run: pnpm test + working-directory: ${{ env.frontend-dir }} env: COVERAGE: true @@ -74,3 +80,25 @@ jobs: uses: codecov/codecov-action@v3 with: file: 
./coverage/lcov.info + + test-backend: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/cache@v3 + with: + path: .venv + key: poetry-${{ hashFiles('poetry.lock')}} + restore-keys: | + poetry- + - name: Build the project + run: | + echo "ENV=dev" > .env + docker-compose up -d --build backend + - name: Lint the code + run: | + docker-compose exec -T backend black --check . + docker-compose exec -T backend flake8 + docker-compose exec -T backend python manage.py makemigrations --check --dry-run --no-input + - name: Run pytest + run: docker-compose exec -T backend pytest --no-cov-on-fail --cov --create-db -vv diff --git a/.gitignore b/.gitignore index d10296ee0..7b301953c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,37 +1,87 @@ # See https://help.github.com/ignore-files/ for more about ignoring files. -# compiled output -/dist -/tmp -# dependencies -/node_modules -/bower_components +# VSCode +.vscode/ -# misc -/.sass-cache -/.eslintcache -/connect.lock -/coverage/* -/libpeerconnection.log -npm-debug.log* -testem.log -*.swp -*.orig +# PyCharm +.idea/ + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ -# vscode -jsconfig.json +# Translations +*.mo +*.pot -/.vscode/ -/.idea/ +# Django stuff: +*.log -# ember-try -/.node_modules.ember-try/ -/bower.json.ember-try -/npm-shrinkwrap.json.ember-try -/package.json.ember-try -/package-lock.json.ember-try -/yarn.lock.ember-try +# Sphinx documentation +docs/_build/ -# broccoli-debug -/DEBUG/ +# PyBuilder +target/ + +#Ipython Notebook +.ipynb_checkpoints + +# Pyenv +.python-version + +# Editor swap files +*.swp + +# local .env file +.env + +# pytest +.pytest_cache + +# dependencies +/node_modules +/bower_components diff --git a/.husky/commit-msg b/.husky/commit-msg deleted file mode 100755 index 33c4fb53c..000000000 --- a/.husky/commit-msg +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -. "$(dirname "$0")/_/husky.sh" - -# skip in CI -[ -n "$CI" ] && exit 0 - -# lint commit message -pnpm commitlint --edit $1 diff --git a/.husky/pre-commit b/.husky/pre-commit deleted file mode 100755 index 8ba9200d6..000000000 --- a/.husky/pre-commit +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -. "$(dirname "$0")/_/husky.sh" - -# skip in CI -[ -n "$CI" ] && exit 0 - -# lint staged files -pnpm lint-staged diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 000000000..4fcfacde5 --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1,3 @@ +# Code owners for the Timed backend. +* @adfinis/dev-backend +* @adfinis/dev-frontend diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..0208ebe21 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,65 @@ +# Contributing + +Contributions to Timed are very welcome! Best have a look at the open [issues](https://github.com/adfinis/timed) +and open a [GitHub pull request](https://github.com/adfinis/timed/compare). See instructions below how to setup development +environment. 
Before writing any code, best discuss your proposed change in a GitHub issue to see if the proposed change makes sense for the project. + +## Setup development environment + +### Clone + +To work on Timed you first need to clone + +```bash +git clone https://github.com/adfinis/timed.git +cd timed +``` + +### Open Shell + +Once it is cloned you can easily open a shell in the docker container to +open a development environment. + +```bash +make shell +``` + +### Testing + +Once you have shelled in to the docker container as described above +you can use common python tooling for formatting, linting, testing +etc. + +```bash +# linting +poetry run flake8 +# format code +poetry run black . +# running tests +poetry run pytest +# create migrations +poetry run python manage.py makemigrations +``` + +Writing of code can still happen outside the docker container of course. + +### Install new requirements + +In case you're adding new requirements you simply need to build the docker container +again for them to be installed and re-open shell. + +```bash +docker-compose build --pull +``` + +### Setup pre commit + +Pre commit hooks are an additional option instead of executing checks in your editor of choice. + +First create a virtualenv with the tool of your choice before running below commands: + +```bash +pip install pre-commit +pip install -r requirements-dev.txt -U +pre-commit install +``` diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..8d9430ffc --- /dev/null +++ b/Makefile @@ -0,0 +1,57 @@ +.DEFAULT_GOAL := help + +.PHONY: help +help: + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort -k 1,1 | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' + +.PHONY: start +start: ## Start the development server + @docker-compose up -d --build + +.PHONY: stop +stop: ## Stop the development server + @docker-compose stop + +.PHONY: lint +lint: ## Lint the project + @docker-compose exec backend sh -c "black --check . 
&& flake8" + +.PHONY: format-code +format-code: ## Format the backend code + @docker-compose exec backend sh -c "black . && isort ." + +.PHONY: test +test: ## Test the project + @docker-compose exec backend sh -c "black . && isort . && pytest --no-cov-on-fail --cov" + +.PHONY: bash +bash: ## Shell into the backend + @docker-compose exec backend bash + +.PHONY: dbshell +dbshell: ## Start a psql shell + @docker-compose exec db psql -Utimed timed + +.PHONY: shell_plus +shell_plus: ## Run shell_plus + @docker-compose exec backend ./manage.py shell_plus + +.PHONY: makemigrations +makemigrations: ## Make django migrations + @docker-compose exec backend ./manage.py makemigrations + +.PHONY: migrate +migrate: ## Migrate django + @docker-compose exec backend ./manage.py migrate + +.PHONY: debug-backend +debug-backend: ## Start backend container with service ports for debugging + @docker-compose run --use-aliases --service-ports backend + +.PHONY: flush +flush: ## Flush database contents + @docker-compose exec backend ./manage.py flush --no-input + +.PHONY: loaddata +loaddata: flush ## Loads test data into the database + @docker-compose exec backend ./manage.py loaddata timed/fixtures/test_data.json diff --git a/README-frontend.md b/README-frontend.md new file mode 100644 index 000000000..5641a28ec --- /dev/null +++ b/README-frontend.md @@ -0,0 +1,55 @@ +![Timed Logo](/public/assets/logo_text.png) + +[![Build Status](https://github.com/adfinis/timed-frontend/actions/workflows/test.yml/badge.svg?branch=main)](https://github.com/adfinis/timed-frontend/actions/workflows/test.yml) +[![Codecov](https://codecov.io/gh/adfinis/timed-frontend/branch/main/graph/badge.svg)](https://codecov.io/gh/adfinis/timed-frontend) +[![License: AGPL v3](https://img.shields.io/badge/License-AGPL%20v3-blue.svg)](https://www.gnu.org/licenses/agpl-3.0) +[![GHCR 
Image](https://github.com/adfinis/timed-frontend/actions/workflows/release-image.yml/badge.svg)](https://github.com/adfinis/timed-frontend/actions/workflows/release-image.yml) + +## Requirements + +You will need the following things properly installed on your computer. + +- [Git](https://git-scm.com/) +- [Node.js](https://nodejs.org/) +- [Yarn](https://yarnpkg.com/) +- [Ember CLI](https://cli.emberjs.com/release/) +- [Google Chrome](https://google.com/chrome/) +- [Firefox](https://www.mozilla.org/firefox/) + +Optional: + +- Docker +- docker-compose + +## Installation + +- `git clone git@github.com/adfinis/timed-frontend` +- `cd timed-frontend` +- `pnpm i` + +## Running / Development + +- `ember server` +- Visit your app at [http://localhost:4200](http://localhost:4200). + +If you have a running [backend](https://github.com/adfinis/timed-backend) you need to run + +- `ember server --proxy=http://localhost:8000` + or +- `pnpm start` + +If you are using docker-compose you can start a static frontend and the backend by following the instructions in the [backend](https://github.com/adfinis/timed-backend) + +### Running Tests + +- `COVERAGE=true ember test` +- `COVERAGE=true ember test --server` + +### Building + +- `ember build` (development) +- `ember build --environment production` (production) + +## License + +Code released under the [GNU Affero General Public License v3.0](LICENSE). 
diff --git a/README.md b/README.md index 5641a28ec..f967d5810 100644 --- a/README.md +++ b/README.md @@ -1,54 +1,170 @@ -![Timed Logo](/public/assets/logo_text.png) +# Timed Backend -[![Build Status](https://github.com/adfinis/timed-frontend/actions/workflows/test.yml/badge.svg?branch=main)](https://github.com/adfinis/timed-frontend/actions/workflows/test.yml) -[![Codecov](https://codecov.io/gh/adfinis/timed-frontend/branch/main/graph/badge.svg)](https://codecov.io/gh/adfinis/timed-frontend) +[![Build Status](https://github.com/adfinis/timed-backend/workflows/Test/badge.svg)](https://github.com/adfinis/timed-backend/actions?query=workflow%3A%22Test%22) +[![Coverage](https://img.shields.io/badge/coverage-100%25-brightgreen.svg)](https://github.com/adfinis/timed-backend/blob/master/setup.cfg) +[![Pyup](https://pyup.io/repos/github/adfinis/timed-backend/shield.svg)](https://pyup.io/account/repos/github/adfinis/timed-backend/) +[![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/adfinis/timed-backend) [![License: AGPL v3](https://img.shields.io/badge/License-AGPL%20v3-blue.svg)](https://www.gnu.org/licenses/agpl-3.0) -[![GHCR Image](https://github.com/adfinis/timed-frontend/actions/workflows/release-image.yml/badge.svg)](https://github.com/adfinis/timed-frontend/actions/workflows/release-image.yml) -## Requirements +Timed timetracking software REST API built with Django -You will need the following things properly installed on your computer. 
- -- [Git](https://git-scm.com/) -- [Node.js](https://nodejs.org/) -- [Yarn](https://yarnpkg.com/) -- [Ember CLI](https://cli.emberjs.com/release/) -- [Google Chrome](https://google.com/chrome/) -- [Firefox](https://www.mozilla.org/firefox/) +## Installation -Optional: +**Requirements** -- Docker +- docker - docker-compose -## Installation +After installing and configuring those requirements, you should be able to run the following +commands to complete the installation: -- `git clone git@github.com/adfinis/timed-frontend` -- `cd timed-frontend` -- `pnpm i` +Add the `timed.local` entries to your hosts file: +```bash +echo "127.0.0.1 timed.local" | sudo tee -a /etc/hosts +``` -## Running / Development +Then just start the docker-compose setup: +```bash +make start +``` -- `ember server` -- Visit your app at [http://localhost:4200](http://localhost:4200). +This brings up complete local installation, including our [Timed Frontend](https://github.com/adfinis/timed-frontend) project. -If you have a running [backend](https://github.com/adfinis/timed-backend) you need to run +You can visit it at [http://timed.local](http://timed.local). + +The API can be accessed at [http://timed.local/api/v1](http://timed.local/api/v1) and the admin interface at [http://timed.local/admin/](http://timed.local/admin/). 
+ +The Keycloak admin interface can be accessed at [http://timed.local/auth/admin](http://timed.local/auth/admin) with the account `admin` and password `admin` + +## Development + +To get the application working locally for development, make sure to create a file `.env` with the following content: + +``` +ENV=dev +DJANGO_OIDC_CREATE_USER=True +``` -- `ember server --proxy=http://localhost:8000` - or -- `pnpm start` +If you have existing users from the previous LDAP authentication, you want to add this line as well: -If you are using docker-compose you can start a static frontend and the backend by following the instructions in the [backend](https://github.com/adfinis/timed-backend) +``` +DJANGO_OIDC_USERNAME_CLAIM=preferred_username +``` -### Running Tests +The test data includes 3 users admin, fritzm and alexs with you can log into [http://timed.local](http://timed.local) +You can initialize the test data using the following command: -- `COVERAGE=true ember test` -- `COVERAGE=true ember test --server` +```bash +make loaddata +``` -### Building +The username and password are identical. + +To access the Django admin interface you will have to change the admin password in Django directly: + +```bash +$ make bash +root@0a036a10f3c4:/app# poetry run python manage.py changepassword admin +Changing password for user 'admin' +Password: +Password (again): +Password changed successfully for user 'admin' +``` + +Then you'll be able to login in the Django admin interface [http://timed.local/admin/](http://timed.local/admin/). + +## Work locally with Ember + +```bash +cd frontend +pnpm i +``` + +## Running / Development + +```bash +ember server +``` +- Visit your app at [http://localhost:4200](http://localhost:4200). 
+ +If you have a running [backend](https://github.com/adfinis/timed-backend) you need to run -- `ember build` (development) -- `ember build --environment production` (production) +```bash +ember server --proxy=http://localhost:8000 +``` +or +```bash +pnpm start +``` + +### Adding a user + +If you want to add other users with different roles, add them in the Keycloak interface (as they would be coming from your LDAP directory). +You will also have to correct their employment in the Django admin interface as it is not correctly set for the moment. +Head to [http://timed.local/admin/](http://timed.local/admin/) after having perform a first login with the user. +You should see that new user in the `Employment -> Users`. +Click on the user and scroll down to the `Employments` section to set a `Location`. +Save the user and you should now see the _Timed_ interface correctly under that account. + + +### Sending emails +In development mode, the apllication is configured to send all email to a Mailhog instance running in the same docker-compose setup. No emails will be sent out from the development environment, unless you specify something else. + +You can access the Mailhog interface at [http://timed.local/mailhog/](http://timed.local/mailhog/). All emails sent from the application will be visible there. + +## Configuration + +Following options can be set as environment variables to configure Timed backend in documented [format](https://github.com/joke2k/django-environ#supported-types) +according to type. 
+ +| Parameter | Description | Default | +|----------------------------------------------|-------------------------------------------------------------------------------------------------------|--------------------------------------------------------------| +| `DJANGO_ENV_FILE` | Path to setup environment vars in a file | .env | +| `DJANGO_DEBUG` | Boolean that turns on/off debug mode | False | +| `DJANGO_SECRET_KEY` | Secret key for cryptographic signing | not set (required) | +| `DJANGO_ALLOWED_HOSTS` | List of hosts representing the host/domain names | not set (required) | +| `DJANGO_HOST_PROTOCOL` | Protocol host is running on (http or https) | http | +| `DJANGO_HOST_DOMAIN` | Main host name server is reachable on | not set (required) | +| `DJANGO_DATABASE_NAME` | Database name | timed | +| `DJANGO_DATABASE_USER` | Database username | timed | +| `DJANGO_DATABASE_HOST` | Database hostname | localhost | +| `DJANGO_DATABASE_PORT` | Database port | 5432 | +| `DJANGO_OIDC_DEFAULT_BASE_URL` | Base URL of the OIDC provider | http://timed.local/auth/realms/timed/protocol/openid-connect | +| `DJANGO_OIDC_OP_AUTHORIZATION_ENDPOINT` | OIDC /auth endpoint | {`DJANGO_OIDC_DEFAULT_BASE_URL`}/auth | +| `DJANGO_OIDC_OP_TOKEN_ENDPOINT` | OIDC /token endpoint | {`DJANGO_OIDC_DEFAULT_BASE_URL`}/token | +| `DJANGO_OIDC_OP_USER_ENDPOINT` | OIDC /userinfo endpoint | {`DJANGO_OIDC_DEFAULT_BASE_URL`}/userinfo | +| `DJANGO_OIDC_OP_JWKS_ENDPOINT` | OIDC /certs endpoint | {`DJANGO_OIDC_DEFAULT_BASE_URL`}/certs | +| `DJANGO_OIDC_RP_CLIENT_ID` | Client ID by your OIDC provider | timed-public | +| `DJANGO_OIDC_RP_CLIENT_SECRET` | Client secret by your OIDC provider, should be None (flow start is handled by frontend) | not set | +| `DJANGO_OIDC_RP_SIGN_ALGO` | Algorithm the OIDC provider uses to sign ID tokens | RS256 | +| `DJANGO_OIDC_VERIFY_SSL` | Verify SSL on OIDC request | dev: False, prod: True | +| `DJANGO_OIDC_CREATE_USER` | Create new user if it doesn't exist in the database | False 
| +| `DJANGO_OIDC_USERNAME_CLAIM` | Username token claim for user lookup / creation | sub | +| `DJANGO_OIDC_EMAIL_CLAIM` | Email token claim for creating new users (if `DJANGO_OIDC_CREATE_USER` is enabled) | email | +| `DJANGO_OIDC_FIRSTNAME_CLAIM` | First name token claim for creating new users (if `DJANGO_OIDC_CREATE_USER` is enabled) | given_name | +| `DJANGO_OIDC_LASTNAME_CLAIM` | Last name token claim for creating new users (if `DJANGO_OIDC_CREATE_USER` is enabled) | family_name | +| `DJANGO_OIDC_BEARER_TOKEN_REVALIDATION_TIME` | Time (in seconds) to cache a bearer token before revalidation is needed | 60 | +| `DJANGO_OIDC_CHECK_INTROSPECT` | Use token introspection for confidential clients | True | +| `DJANGO_OIDC_OP_INTROSPECT_ENDPOINT` | OIDC token introspection endpoint (if `DJANGO_OIDC_CHECK_INTROSPECT` is enabled) | {`DJANGO_OIDC_DEFAULT_BASE_URL`}/token/introspect | +| `DJANGO_OIDC_RP_INTROSPECT_CLIENT_ID` | OIDC client id (if `DJANGO_OIDC_CHECK_INTROSPECT` is enabled) of confidential client | timed-confidential | +| `DJANGO_OIDC_RP_INTROSPECT_CLIENT_SECRET` | OIDC client secret (if `DJANGO_OIDC_CHECK_INTROSPECT` is enabled) of confidential client | not set | +| `DJANGO_OIDC_ADMIN_LOGIN_REDIRECT_URL` | URL of the django-admin, to which the user is redirected after successful admin login | dev: http://timed.local/admin/, prod: not set | +| `DJANGO_ALLOW_LOCAL_LOGIN` | Enable / Disable login with local user/password (in admin) | True | +| `EMAIL_URL` | Uri of email server | smtp://localhost:25 | +| `DJANGO_DEFAULT_FROM_EMAIL` | Default email address to use for various responses | webmaster@localhost | +| `DJANGO_SERVER_EMAIL` | Email address error messages are sent from | root@localhost | +| `DJANGO_ADMINS` | List of people who get error notifications | not set | +| `DJANGO_WORK_REPORT_PATH` | Path of custom work report template | not set | +| `DJANGO_SENTRY_DSN` | Sentry DSN for error reporting | not set, set to enable Sentry integration | +| 
`DJANGO_SENTRY_TRACES_SAMPLE_RATE` | Sentry trace sample rate, Set 1.0 to capture 100% of transactions | 1.0 | +| `DJANGO_SENTRY_SEND_DEFAULT_PII` | Associate users to errors in Sentry | True | +| `HURRICANE_REQ_QUEUE_LEN` | Django Hurricane's request queue length. When full, the readiness probe toggles | 250 | +| `STATIC_ROOT` | Path to the static files. In prod, you may want to mount a docker volume here, so it can be served by nginx | `/app/static` | +| `STATIC_URL` | URL path to the static files on the web server. Configure nginx to point this to `$STATIC_ROOT` | `/static` | + +## Contributing + +Look at our [contributing guidelines](CONTRIBUTING.md) to start with your first contribution. ## License diff --git a/backend/.dockerignore b/backend/.dockerignore new file mode 100644 index 000000000..019c069c1 --- /dev/null +++ b/backend/.dockerignore @@ -0,0 +1,18 @@ +.cache +.coverage +.coverage.* +docker-compose.* +Dockerfile +.dockerignore +.env +.git +.github +*.pyc +__pycache__ +*.pyd +*.pyo +.pytest_cache +.Python +.python-version +*.swp +.venv diff --git a/backend/.flake8 b/backend/.flake8 new file mode 100644 index 000000000..cc912a7ab --- /dev/null +++ b/backend/.flake8 @@ -0,0 +1,33 @@ +[flake8] +ignore = + # whitespace before ':' + E203, + # too many leading ### in a block comment + E266, + # line too long (managed by black) + E501, + # Line break occurred before a binary operator (this is not PEP8 compatible) + W503, + # Missing docstring in public module + D100, + # Missing docstring in public class + D101, + # Missing docstring in public method + D102, + # Missing docstring in public function + D103, + # Missing docstring in public package + D104, + # Missing docstring in magic method + D105, + # Missing docstring in public package + D106, + # Missing docstring in __init__ + D107, + # needed because of https://github.com/ambv/black/issues/144 + D202, + # other string does contain unindexed parameters + P103 +max-line-length = 80 +exclude = migrations 
snapshots +max-complexity = 10 \ No newline at end of file diff --git a/backend/.pre-commit-config.yaml b/backend/.pre-commit-config.yaml new file mode 100644 index 000000000..9c483cbd2 --- /dev/null +++ b/backend/.pre-commit-config.yaml @@ -0,0 +1,18 @@ +repos: + - repo: local + hooks: + - id: black + name: black + language: system + entry: black + types: [python] + - id: isort + name: isort + language: system + entry: isort + types: [python] + - id: flake8 + name: flake8 + language: system + entry: flake8 + types: [python] diff --git a/backend/CHANGELOG.md b/backend/CHANGELOG.md new file mode 100644 index 000000000..f21689cef --- /dev/null +++ b/backend/CHANGELOG.md @@ -0,0 +1,260 @@ +# v3.0.7 + +### Fix +* **redmine:** Convert Decimal objects to floats ([`5f613d1`](https://github.com/adfinis/timed-backend/commit/5f613d1e83f007270228c109846fb6a525eced71)) + +# v3.0.6 + +### Feature +* **redmine:** Add redmine issue id to log ([`b5c509b`](https://github.com/adfinis/timed-backend/commit/b5c509bc196137b28e84b814d4dba0a60b117747)) + +# v3.0.5 + +### Fix +* **tracking:** Fix updating own rejected reports and rejecting own reports ([`6a5d0ed`](https://github.com/adfinis/timed-backend/commit/6a5d0eda470939c59ad9ea869d4296e0115dd33e)) + +# v3.0.4 + +### Fix +* **redmine:** Log estimated_hours in update_project_expenditure command ([`fc1f631`](https://github.com/adfinis/timed-backend/commit/fc1f631a7fcdc25ab93b5cbcf38845f30af3f4a5)) + +# v3.0.3 + +### Fix +* **redmine:** Fix value check for custom fields ([`5f6bc53`](https://github.com/adfinis/timed-backend/commit/5f6bc532c6e0d76c9dae07b423afa6ea7c2ab52c)) + +# v3.0.2 + +### Fix +* **redmine:** Fix NoneType for amount offered/invoiced for projects ([`6e1f4c8`](https://github.com/adfinis/timed-backend/commit/6e1f4c89672a991ce1765bbed7c5e71f02a119e2)) + +# v3.0.1 + +### Feature +* Empty sums in correcr ordering ([`757de4e`](https://github.com/adfinis/timed-backend/commit/757de4e263cd42bb8521bccdd51dc6bf2207e761)) +* 
**statistics:** Support ordering in new queryset wrapper ([`fb5a2dc`](https://github.com/adfinis/timed-backend/commit/fb5a2dc6480936004d90c067161905196aad58e0)) + +### Fix +* **tracking:** Fix report update notifactions ([`8d0d0fd`](https://github.com/adfinis/timed-backend/commit/8d0d0fd62896652a15ed00b840090cccdc4eaac8)) +* **tests:** Customer statistic test had a missing customer ([`c99b512`](https://github.com/adfinis/timed-backend/commit/c99b5120fce4f3dc8bddf21346eb46bb8ba72239)) +* **pytest:** Ignore "invalid escape sequence" deprecation warning ([`4e08672`](https://github.com/adfinis/timed-backend/commit/4e086727a1e69e658e5c044930ce1248aa9c1435)) +* **statistics:** Refactor multiqs to use filtering aggregates ([`345b8df`](https://github.com/adfinis/timed-backend/commit/345b8df559593b03f69dcc1b81c590a4277d8fda)) +* **makefile:** Use aliases for debug backend ([`9c47123`](https://github.com/adfinis/timed-backend/commit/9c47123af4cab3ab2095b9ff1b0e63ca973ee6ac)) + +# v3.0.0 + +### Feature +* **filters:** Add number multi value filter ([`4688e41`](https://github.com/adfinis/timed-backend/commit/4688e41da5300d789ed50bdd6af34d7d481767c8)) +* **redmine:** Add pretend mode to redmine commands ([`abc5083`](https://github.com/adfinis/timed-backend/commit/abc50834feb3f84d3018abaa31073ab68e79dd76)) +* **notifications:** Project budget check notifications ([`b81e28e`](https://github.com/adfinis/timed-backend/commit/b81e28e9d0b8386e54caf57b90960e392d5811c0)) +* **statistics:** Show amount offered and invoiced in project statistics ([`144444b`](https://github.com/adfinis/timed-backend/commit/144444b298f2139f44a8ca291ca34ccfb3f66899)) +* **redmine:** Import project expenditure from redmine ([`766f79b`](https://github.com/adfinis/timed-backend/commit/766f79bc17ce927a05217e9bacc18c478404e6f6)) +* **redmine:** Update expenditures on redmine projects ([`0aa9da6`](https://github.com/adfinis/timed-backend/commit/0aa9da69e8432a4d9537b65dd935bebe23fd4c72)) +* **filters:** Allow 
filtering of tasks and reports in statistics ([`b5b9c8d`](https://github.com/adfinis/timed-backend/commit/b5b9c8d633a4d6fc8633594349feec7ee59fb8d0)) +* **employment:** Add is_external filter for user endpoint ([`8a1b272`](https://github.com/adfinis/timed-backend/commit/8a1b2723147c9775cb06abd090ec307610a2d254)) +* **admin:** Add searchable dropdowns for user lists in admin ([`4c01054`](https://github.com/adfinis/timed-backend/commit/4c010542ce3b3544c3e87f1dd2ca7ff8ec4df245)) +* Track remaining effort on tasks ([`3d045f2`](https://github.com/adfinis/timed-backend/commit/3d045f21ed7fd2147b49c6190dc3e1474c69decb)) +* **tracking:** Reject reports ([`a4e8983`](https://github.com/adfinis/timed-backend/commit/a4e8983265d0b87101a6151982fbb8a802e4cd9a)) + +### Fix +* **tracking:** Fix automatic unreject when bulk updating ([`f110eb0`](https://github.com/adfinis/timed-backend/commit/f110eb0ea864a7115f7ed1d24e868aafb6c038f2)) +* **tracking:** Fix remaining effort on report creation ([`abceb32`](https://github.com/adfinis/timed-backend/commit/abceb322e042df5c34b04e685c331527848c898f)) +* **tracking:** Fix setting of remaining effort ([`16f1dbb`](https://github.com/adfinis/timed-backend/commit/16f1dbb54f625a8468fd33066a685ac1cfae7fec)) +* **notifications:** Omit projects with no reports ([`91a6dd5`](https://github.com/adfinis/timed-backend/commit/91a6dd5ec2d128d6df3994c78eebbb295ec9a2f5)) +* **tracking:** Allow null values on remaining effort for reports ([`08a5aa4`](https://github.com/adfinis/timed-backend/commit/08a5aa429eac6d25cec0699a42919ee8f959ed12)) +* **tracking:** Fix absence for users with multiple employments ([`d884ef6`](https://github.com/adfinis/timed-backend/commit/d884ef6a4463e5095fcecc6cd999aa6b595f5530)) +* Add missing rejected field to ReportIntersectionSerializer ([`ee8f79a`](https://github.com/adfinis/timed-backend/commit/ee8f79a1a724763bdd51222010f47ec40ef71622)) +* **auth:** Let failing auth requests return 401 
([`8454601`](https://github.com/adfinis/timed-backend/commit/8454601019f33272a39814ac8e3fe033c758e7e7)) +* **dev:** Remove deprecated flag from pre-commit isort ([`50e5da2`](https://github.com/adfinis/timed-backend/commit/50e5da2ad5ef12098e0128ba907ac40ac2fa1773)) +* **tracking:** Fix remaining effort check when creating report ([`fc7c92c`](https://github.com/adfinis/timed-backend/commit/fc7c92cf0f3cb937100616abb24bd06804408a51)) +* **statistics:** Add missing fields for project and task statistics ([`89fb718`](https://github.com/adfinis/timed-backend/commit/89fb718901f41914323a60d99a2983ba0454daa0)) +* **reports:** Fix project and customer statistics ([`a3ab8ac`](https://github.com/adfinis/timed-backend/commit/a3ab8acb5be4107ea7f4f6677cdbdb57dd0b95c2)) +* **projects:** Ignore signal when loading a fixture ([`21e5dd7`](https://github.com/adfinis/timed-backend/commit/21e5dd7861a52793cf4b40e94c04de78a64ca3ec)) +* **container:** Executable bit for cmd.sh ([`34f2751`](https://github.com/adfinis/timed-backend/commit/34f27517c896577ddca3e1355cdc3ba5b8233d29)) +* **filters:** Allow Q filtering for MultiQS querysets ([`b629c9d`](https://github.com/adfinis/timed-backend/commit/b629c9d97cec7d4779baaa94f6eb628b394a3c53)) +* **reports:** Refactor statistics ([`21d3677`](https://github.com/adfinis/timed-backend/commit/21d36774816467977f6a45bab0641d7abf4d6ec5)) + + +# v2.0.0 + +### Breaking +* **tracking:** rename field type to absence_type ([`8ca44d2`](https://github.com/adfinis/timed-backend/commit/8ca44d2f361228e7f71e3e28a795079a2e3e7745)) + +# v1.6.3 + +### Fix + +* **workreport:** Update metadata ([`257e2ae`](https://github.com/adfinis/timed-backend/pull/855/commits/257e2aeedd36a112018bdedaf32191eaf0100420)) +* **deps:** Bump django from 3.1.14 to 3.2.13 ([`ca8b76d`](https://github.com/adfinis/timed-backend/pull/856/commits/ca8b76dd2d1f2ce365595101bb4a6d53aa85994d)) + +# v1.6.2 + +### Fix + +* **tracking:** Allow updating of billed reports 
([`e73e716`](https://github.com/adfinis/timed-backend/pull/851/commits/e73e7161d51b93b14faa0a5f5babf740166aff06)) + +# v1.6.1 + +### Fix + +* **projects:** Change permissions and visibility for billing types ([`8a705db`](https://github.com/adfinis/timed-backend/pull/847/commits/8a705dbca7a66abd443f0a99341004c3515f3dbd)) +* **subscription:** Fix parser and notifications for orders ([`0deaafa`](https://github.com/adfinis/timed-backend/pull/849/commits/0deaafa71d8520c7bf17fc91aa938f0106f96150)) + +# v1.6.0 + +### Feature +* **env:** Add tls option for emails to env var ([`c68107a`](https://github.com/adfinis/timed-backend/pull/845/commits/c68107a4a58f54fbaa2c1de2f158437ad78609f3)) + +### Fix +* **reports:** Add reviewer hierarchy in `notify_reviewers_unverified` ([`91751e9`](https://github.com/adfinis/timed-backend/pull/843/commits/91751e9497ac67ecb3072e33a6c990169d8488ee)) +* **subscription:** Include cost center in `SubscriptionProjectSerializer` ([`11640f8`](https://github.com/adfinis/timed-backend/pull/846/commits/11640f88d797480a5f110fc7fc9b27d262f22bfa)) + +# v1.5.5 + +### Fix +* **reports:** Center total hours column in workreport ([`1acd374`](https://github.com/adfinis/timed-backend/pull/840/commits/1acd3742af972e17d8600b560f16f7afe9a70d1d)) + +# v1.5.4 + +### Fix +* **auth:** Username should be case insensitive ([`1ce24bd`](https://github.com/adfinis/timed-backend/commit/1ce24bd04f4b217e560707bd699bbeb6fe14fe09)) + +# v1.5.2 + +### Fix +* **subscription/notify_admin:** Prevent invalid addition of datetime and int ([`645881d`](https://github.com/adfinis/timed-backend/pull/829/commits/645881d22aa7987614a13e7ee62a8f201b60c717)) + +# v1.5.1 + +### Fix +* **subscription/notify_admin:** Check project.estimate before calcualting total_hours ([`63273d2`](https://github.com/adfinis/timed-backend/commit/63273d27e9c57714ba9c01c9870a6949cfd33e91)) +* **subscriptions/notify_admin:** Use dateutils parser to prevent an error 
([`c3a8c6c`](https://github.com/adfinis/timed-backend/commit/c3a8c6ceb708efd309f79c6f9808231e2169dea4)) + +# v1.5.0 + +### Feat + +* **settings**: add CORS_ALLOWED_ORIGINS to env (9e32bdc58171cbbd24304fb2c30d745d9e2cbe23) + +# v1.4.5 + +### Features + +* Add new `is_customer` assignee role and update permissions #810 +* Update fixtures and keycloak config #813 +* **authentication:** Update django user data according to OIDC userinfo #814 +* **subscription:** Send email on order creation #811 + +### Fixes + +* Fix visibility in various models to not depend on employment #808 +* **subscription:** fix visibility of subscription projects #812 + +# v1.4.4 + +### Features + +* **reports:** Change column for total hours for tasks #800 +* **fixtures:** Add accountant user to fixtures #802 +* **tracking:** Add user to Report Intersection #803 +* **settings:** Make DATA_UPLOAD_MAX_NUMBER_FIELDS alterable #805 + +### Fixes + +* Fix setting correct value for billed flag on projects #799 +* **tracking:** Remove billed check from "editable" filter #804 +* **tracking:** Fix reviewer filter to only show reports in which the user is sole reviewer #807 + +# v1.4.3 + +### Features + +* Use whitenoise to host static files #790 +* Add SECURE_PROXY_SSL_HEADER #785 + +### Fixes + +* Rename IsNotBilledAndVerified permission #796 +* **reports:** Add missing logo and update font in workreport #794 +* **redmine:** Fix total hours calculation #793 + +# v1.4.2 + +### Features + +* Add accountant flag for users #782 +* Add number filter for assignees #780 + +### Fixes + +* Fix calculations in workreport #781 + +# v1.4.1 + +### Fixes + +Add manager role to project assignees #779 + +# v1.4.0 + +### Features +- Serve static files for Django Admin #777 + +### Fixes +- Update fixtures according to new roles #778 + + +# v1.3.0 (12 August 2021) + +### Feature +* Use assignees with reviewer role instead of reviewers 
([`89def71`](https://github.com/adfinis/timed-backend/commit/89def71eefc0f18e7989b34f882acd2fd619998d)) +* Rewrite permissions and visibilty to use with assignees and external employees ([`159e750`](https://github.com/adfinis/timed-backend/commit/159e75033ed4c477d56f2a2817dee82b3066d2a9)) +* Add user assignement to customers, projects and tasks ([`6ff4259`](https://github.com/adfinis/timed-backend/commit/6ff425941307a0386d835187eaad02e26cc718e3)) +* Add and enable sentry-sdk for error reporting ([`1e96b78`](https://github.com/adfinis/timed-backend/commit/1e96b785206ddd1a871e5b23a9126f50c94c38dc)) +* **employment:** Add new attribute is_external to employment model ([`e8e6291`](https://github.com/adfinis/timed-backend/commit/e8e629193b7aabd592fc9744bc7210577d58c910)) +* **runtime:** Use gunicorn instead of uwsgi ([`e6b1fdf`](https://github.com/adfinis/timed-backend/commit/e6b1fdfc5bb2ad5578ed2927ee210b5da2119f9b)) +* **redmine:** Update template formatting ([`9b1a6f1`](https://github.com/adfinis/timed-backend/commit/9b1a6f164f72c2eae57a1e20cc0cff763c7e535a)) + +### Fix +* Update workreport template ([`b877194`](https://github.com/adfinis/timed-backend/commit/b87719485affd6421734251c270d1fbeb37a7176)) + + +# v1.2.0 (16 April 2021) + +### Feature +* Export metrics with django-prometheus ([`6ed9cab`](https://github.com/adfinis/timed-backend/commit/6ed9cabeeefd2e6945a63b83de1ee85018fb56a5)) +* Show not_billable and review attributes for reports in weekly report ([`a02aca4`](https://github.com/adfinis/timed-backend/commit/a02aca48ae609f9ac514238be723c056fa60754f)) +* Add customer_visible field to project serializer ([`2f12f86`](https://github.com/adfinis/timed-backend/commit/2f12f86d6132c1362d7065ad0fd8cf89a4f4f377)) +* Add billed flag to project and tracking ([`fe41199`](https://github.com/adfinis/timed-backend/commit/fe41199527e5ab37f23c715d844805b7d8944d64)) +* **projects:** Add currency fields to task and project 
([`7266c34`](https://github.com/adfinis/timed-backend/commit/7266c346236e9e0d1c83d9f84b99a4e782256ba4)) + +### Fix +* Translate work report to English ([`7a87d93`](https://github.com/adfinis/timed-backend/commit/7a87d935893dbc68fd59a4fb477691ad209b6a3b)) +* Add custom forms for supervisor and supervisee inlines ([`b92799d`](https://github.com/adfinis/timed-backend/commit/b92799d66759479827cf11f958c12d55d9c8d5bd)) +* Add billable column and calculate not billable time ([`4184b76`](https://github.com/adfinis/timed-backend/commit/4184b76c66b5233d7a568cc6e37d9112ae9d939f)) +* **tracking:** Set billed from project on report ([`d25e64f`](https://github.com/adfinis/timed-backend/commit/d25e64fd4c898757acb565996173f460f636c6a6)) +* **tracking:** Update billed if not sent with request ([`62295ba`](https://github.com/adfinis/timed-backend/commit/62295bac19f302fa45281a72edb09397e3cbc4c6)) +* Add test data users to keycloak config ([`082ef6e`](https://github.com/adfinis/timed-backend/commit/082ef6e14a406a5d3b1a5f286007169689c0cb1b)) + +# v1.1.2 (28 October 2020) + +### Fix +* fix user based permissions to use the IS_AUTHENTICATED permission properly (#654) + + +# v1.1.1 (14 August 2020) + +### Fix +* increase uwsgi buffer-size for big query strings + + +# v1.1.0 (11 August 2020) + +### Feature +* implement SSO OIDC login for django admin +* django-local user/password (django-admin) login is now a toggable setting, see `DJANGO_ALLOW_LOCAL_LOGIN` + + +# v1.0.0 (30 July 2020) + +See Github releases for changelog of previous versions. 
diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 000000000..e3a03d7d1 --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,28 @@ +FROM python:3.9 + +WORKDIR /app + +RUN apt-get update && apt-get install -y --no-install-recommends wait-for-it \ + libpq-dev \ + && rm -rf /var/lib/apt/lists/* \ + && mkdir -p /app + +ENV DJANGO_SETTINGS_MODULE timed.settings +ENV STATIC_ROOT /var/www/static +ENV WAITFORIT_TIMEOUT 0 + +ENV HURRICANE_REQ_QUEUE_LEN 250 + +RUN pip install -U poetry + +ARG INSTALL_DEV_DEPENDENCIES=false +COPY pyproject.toml poetry.lock /app/ +RUN poetry config virtualenvs.create false \ + && if [ "$INSTALL_DEV_DEPENDENCIES" = "true" ]; then poetry install; else poetry install --no-dev; fi + +COPY . /app + +RUN mkdir -p /var/www/static + +EXPOSE 80 +CMD ./cmd.sh diff --git a/backend/cmd.sh b/backend/cmd.sh new file mode 100755 index 000000000..a83e61a30 --- /dev/null +++ b/backend/cmd.sh @@ -0,0 +1,13 @@ +#!/bin/sh + +# All parameters to the script are appended as arguments to `manage.py serve` + +set -x + +./manage.py collectstatic --noinput + +set -e + +wait-for-it "${DJANGO_DATABASE_HOST}":"${DJANGO_DATABASE_PORT}" -t "${WAITFORIT_TIMEOUT}" +./manage.py migrate --no-input +./manage.py serve --static --port 80 --req-queue-len "${HURRICANE_REQ_QUEUE_LEN:-250}" "$@" diff --git a/backend/manage.py b/backend/manage.py new file mode 100755 index 000000000..8ce2c11b0 --- /dev/null +++ b/backend/manage.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python +import os +import sys + +if __name__ == "__main__": + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "timed.settings") + + from django.core.management import execute_from_command_line + + execute_from_command_line(sys.argv) diff --git a/backend/poetry.lock b/backend/poetry.lock new file mode 100644 index 000000000..fae3a0873 --- /dev/null +++ b/backend/poetry.lock @@ -0,0 +1,2239 @@ +# This file is automatically @generated by Poetry 1.5.0 and should not be changed by hand. 
+ +[[package]] +name = "appnope" +version = "0.1.3" +description = "Disable App Nap on macOS >= 10.9" +optional = false +python-versions = "*" +files = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] + +[[package]] +name = "asgiref" +version = "3.4.1" +description = "ASGI specs, helper code, and adapters" +optional = false +python-versions = ">=3.6" +files = [ + {file = "asgiref-3.4.1-py3-none-any.whl", hash = "sha256:ffc141aa908e6f175673e7b1b3b7af4fdb0ecb738fc5c8b88f69f055c2415214"}, + {file = "asgiref-3.4.1.tar.gz", hash = "sha256:4ef1ab46b484e3c706329cedeff284a5d40824200638503f5768edb6de7d58e9"}, +] + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + +[[package]] +name = "asttokens" +version = "2.2.1" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = "*" +files = [ + {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, + {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, +] + +[package.dependencies] +six = "*" + +[package.extras] +test = ["astroid", "pytest"] + +[[package]] +name = "babel" +version = "2.12.1" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, + {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, +] + +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +optional = false +python-versions = "*" +files = [ + {file = 
"backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] + +[[package]] +name = "black" +version = "23.3.0" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.7" +files = [ + {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"}, + {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"}, + {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"}, + {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"}, + {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"}, + {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"}, + {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"}, + {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = 
"sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"}, + {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"}, + {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"}, + {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"}, + {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"}, + {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"}, + {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"}, + {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"}, + {file = "black-23.3.0.tar.gz", 
hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2023.5.7" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, + {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, +] + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = 
"cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = 
"chardet" +version = "5.1.0" +description = "Universal encoding detector for Python 3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9"}, + {file = "chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.1.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = 
"charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = 
"charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = 
"charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, +] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.2.7" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, + {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, + {file = 
"coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, + {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, + {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, + {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, + {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, + {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, + {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, + {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, + {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, + {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = 
"sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, + {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, + {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, + {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, + {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, + {file = "coverage-7.2.7.tar.gz", hash = 
"sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cryptography" +version = "41.0.1" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699"}, + {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b"}, + {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3"}, + {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db"}, + {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31"}, + {file = "cryptography-41.0.1-cp37-abi3-win32.whl", hash = "sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5"}, + {file = "cryptography-41.0.1-cp37-abi3-win_amd64.whl", hash = 
"sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039"}, + {file = "cryptography-41.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a"}, + {file = "cryptography-41.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5"}, + {file = "cryptography-41.0.1.tar.gz", hash = "sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for 
Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "django" +version = "3.2.19" +description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." +optional = false +python-versions = ">=3.6" +files = [ + {file = "Django-3.2.19-py3-none-any.whl", hash = "sha256:21cc991466245d659ab79cb01204f9515690f8dae00e5eabde307f14d24d4d7d"}, + {file = "Django-3.2.19.tar.gz", hash = "sha256:031365bae96814da19c10706218c44dff3b654cc4de20a98bd2d29b9bde469f0"}, +] + +[package.dependencies] +asgiref = ">=3.3.2,<4" +pytz = "*" +sqlparse = ">=0.2.2" + +[package.extras] +argon2 = ["argon2-cffi (>=19.1.0)"] +bcrypt = ["bcrypt"] + +[[package]] +name = "django-cors-headers" +version = "4.1.0" +description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." +optional = false +python-versions = ">=3.7" +files = [ + {file = "django_cors_headers-4.1.0-py3-none-any.whl", hash = "sha256:88a4bfae24b6404dd0e0640203cb27704a2a57fd546a429e5d821dfa53dd1acf"}, + {file = "django_cors_headers-4.1.0.tar.gz", hash = "sha256:36a8d7a6dee6a85f872fe5916cc878a36d0812043866355438dfeda0b20b6b78"}, +] + +[package.dependencies] +Django = ">=3.2" + +[[package]] +name = "django-environ" +version = "0.10.0" +description = "A package that allows you to utilize 12factor inspired environment variables to configure your Django application." 
+optional = false +python-versions = ">=3.5,<4" +files = [ + {file = "django-environ-0.10.0.tar.gz", hash = "sha256:b3559a91439c9d774a9e0c1ced872364772c612cdf6dc919506a2b13f7a77225"}, + {file = "django_environ-0.10.0-py2.py3-none-any.whl", hash = "sha256:510f8c9c1d0a38b0815f91504270c29440a0cf44fab07f55942fa8d31bbb9be6"}, +] + +[package.extras] +develop = ["coverage[toml] (>=5.0a4)", "furo (>=2021.8.17b43,<2021.9.dev0)", "pytest (>=4.6.11)", "sphinx (>=3.5.0)", "sphinx-notfound-page"] +docs = ["furo (>=2021.8.17b43,<2021.9.dev0)", "sphinx (>=3.5.0)", "sphinx-notfound-page"] +testing = ["coverage[toml] (>=5.0a4)", "pytest (>=4.6.11)"] + +[[package]] +name = "django-excel" +version = "0.0.10" +description = "A django middleware that provides one application programminginterface to read and write data in different excel file formats" +optional = false +python-versions = "*" +files = [ + {file = "django-excel-0.0.10.tar.gz", hash = "sha256:81cd3bce8007009c30205f7085a97f2908557014900775577cab0b9a770c2bad"}, + {file = "django_excel-0.0.10-py2.py3-none-any.whl", hash = "sha256:f0297202fc460eb74657f8a9d4473921050fbe2e297765c174be9cf33d1195c7"}, +] + +[package.dependencies] +Django = ">=1.6.1" +pyexcel = ">=0.5.7" +pyexcel-webio = ">=0.1.2" + +[package.extras] +ods = ["pyexcel-ods3 (>=0.4.0)"] +xls = ["pyexcel-xls (>=0.4.0)"] +xlsx = ["pyexcel-xlsx (>=0.4.0)"] + +[[package]] +name = "django-extensions" +version = "3.2.3" +description = "Extensions for Django" +optional = false +python-versions = ">=3.6" +files = [ + {file = "django-extensions-3.2.3.tar.gz", hash = "sha256:44d27919d04e23b3f40231c4ab7af4e61ce832ef46d610cc650d53e68328410a"}, + {file = "django_extensions-3.2.3-py3-none-any.whl", hash = "sha256:9600b7562f79a92cbf1fde6403c04fee314608fefbb595502e34383ae8203401"}, +] + +[package.dependencies] +Django = ">=3.2" + +[[package]] +name = "django-filter" +version = "23.2" +description = "Django-filter is a reusable Django application for allowing users to filter querysets 
dynamically." +optional = false +python-versions = ">=3.7" +files = [ + {file = "django-filter-23.2.tar.gz", hash = "sha256:2fe15f78108475eda525692813205fa6f9e8c1caf1ae65daa5862d403c6dbf00"}, + {file = "django_filter-23.2-py3-none-any.whl", hash = "sha256:d12d8e0fc6d3eb26641e553e5d53b191eb8cec611427d4bdce0becb1f7c172b5"}, +] + +[package.dependencies] +Django = ">=3.2" + +[[package]] +name = "django-hurricane" +version = "1.3.4" +description = "Hurricane is an initiative to fit Django perfectly with Kubernetes." +optional = false +python-versions = "~=3.8" +files = [ + {file = "django-hurricane-1.3.4.tar.gz", hash = "sha256:16fd74239adc8bba75b988859a3a28820e93f8c1232c65f251e70b790de04e92"}, + {file = "django_hurricane-1.3.4-py3-none-any.whl", hash = "sha256:6706dc95b05d07e4eb32b08b6b7f11ea0fbd3952bb915f0e3648e1df3fa599a2"}, +] + +[package.dependencies] +asgiref = ">=3.4.0,<3.5.0" +Django = ">=2.2" +pika = ">=1.1.0,<1.2.0" +requests = ">=2.25,<3.0" +tornado = ">=6.1,<7.0" + +[package.extras] +debug = ["debugpy (>=1.5,<2.0)"] +pycharm = ["pydevd-pycharm (>=213.5605.23,<213.5606.0)"] + +[[package]] +name = "django-money" +version = "3.1.0" +description = "Adds support for using money and currency fields in django models and forms. Uses py-moneyed as the money implementation." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "django-money-3.1.0.tar.gz", hash = "sha256:06a9257fa784576f5a0885e9b179065e3d4da4797876fa0a4f310de06b6dc65b"}, + {file = "django_money-3.1.0-py3-none-any.whl", hash = "sha256:e2d3cd025704dc00fcdf05733273299db5f8a6519335ad79a82c9d4269da6789"}, +] + +[package.dependencies] +Django = ">=2.2" +py-moneyed = ">=2.0,<3.0" +setuptools = "*" + +[package.extras] +exchange = ["certifi"] +test = ["django-stubs", "mixer", "mypy", "pytest (>=3.1.0)", "pytest-cov", "pytest-django", "pytest-pythonpath"] + +[[package]] +name = "django-multiselectfield" +version = "0.1.12" +description = "Django multiple select field" +optional = false +python-versions = "*" +files = [ + {file = "django-multiselectfield-0.1.12.tar.gz", hash = "sha256:d0a4c71568fb2332c71478ffac9f8708e01314a35cf923dfd7a191343452f9f9"}, + {file = "django_multiselectfield-0.1.12-py3-none-any.whl", hash = "sha256:c270faa7f80588214c55f2d68cbddb2add525c2aa830c216b8a198de914eb470"}, +] + +[package.dependencies] +django = ">=1.4" + +[[package]] +name = "django-nested-inline" +version = "0.4.6" +description = "Recursive nesting of inline forms for Django Admin" +optional = false +python-versions = "*" +files = [ + {file = "django-nested-inline-0.4.6.tar.gz", hash = "sha256:e57b55858d112364dfb112bbcdabb888e581d1677d31c1cac3bdcef6c890dc61"}, + {file = "django_nested_inline-0.4.6-py2.py3-none-any.whl", hash = "sha256:4fc6f0e78b3b5411b4bb7f180bb984831b88874bda48e49a14307baff5da5f12"}, +] + +[[package]] +name = "django-prometheus" +version = "2.3.1" +description = "Django middlewares to monitor your application with Prometheus.io." 
+optional = false +python-versions = "*" +files = [ + {file = "django-prometheus-2.3.1.tar.gz", hash = "sha256:f9c8b6c780c9419ea01043c63a437d79db2c33353451347894408184ad9c3e1e"}, + {file = "django_prometheus-2.3.1-py2.py3-none-any.whl", hash = "sha256:cf9b26f7ba2e4568f08f8f91480a2882023f5908579681bcf06a4d2465f12168"}, +] + +[package.dependencies] +prometheus-client = ">=0.7" + +[[package]] +name = "djangorestframework" +version = "3.14.0" +description = "Web APIs for Django, made easy." +optional = false +python-versions = ">=3.6" +files = [ + {file = "djangorestframework-3.14.0-py3-none-any.whl", hash = "sha256:eb63f58c9f218e1a7d064d17a70751f528ed4e1d35547fdade9aaf4cd103fd08"}, + {file = "djangorestframework-3.14.0.tar.gz", hash = "sha256:579a333e6256b09489cbe0a067e66abe55c6595d8926be6b99423786334350c8"}, +] + +[package.dependencies] +django = ">=3.0" +pytz = "*" + +[[package]] +name = "djangorestframework-jsonapi" +version = "6.0.0" +description = "A Django REST framework API adapter for the JSON:API spec." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "djangorestframework-jsonapi-6.0.0.tar.gz", hash = "sha256:f2465b1b1cd3f372abacc8d99f82835643373f4f3f12965276ad1ccc2d110415"}, + {file = "djangorestframework_jsonapi-6.0.0-py2.py3-none-any.whl", hash = "sha256:a93b3678bd5e2f070946ca32d7d0bb3734cb5966a80f8a44fa721fcf15cf89ce"}, +] + +[package.dependencies] +django = ">=3.2,<4.2" +djangorestframework = ">=3.13,<3.15" +inflection = ">=0.5.0" + +[package.extras] +django-filter = ["django-filter (>=2.4)"] +django-polymorphic = ["django-polymorphic (>=3.0)"] +openapi = ["pyyaml (>=5.4)", "uritemplate (>=3.0.1)"] + +[[package]] +name = "et-xmlfile" +version = "1.1.0" +description = "An implementation of lxml.xmlfile for the standard library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, + {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.1.1" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, + {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "executing" +version = "1.2.0" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = "*" +files = [ + {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, + {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, +] + 
+[package.extras] +tests = ["asttokens", "littleutils", "pytest", "rich"] + +[[package]] +name = "factory-boy" +version = "3.2.1" +description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." +optional = false +python-versions = ">=3.6" +files = [ + {file = "factory_boy-3.2.1-py2.py3-none-any.whl", hash = "sha256:eb02a7dd1b577ef606b75a253b9818e6f9eaf996d94449c9d5ebb124f90dc795"}, + {file = "factory_boy-3.2.1.tar.gz", hash = "sha256:a98d277b0c047c75eb6e4ab8508a7f81fb03d2cb21986f627913546ef7a2a55e"}, +] + +[package.dependencies] +Faker = ">=0.7.0" + +[package.extras] +dev = ["Django", "Pillow", "SQLAlchemy", "coverage", "flake8", "isort", "mongoengine", "tox", "wheel (>=0.32.0)", "zest.releaser[recommended]"] +doc = ["Sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"] + +[[package]] +name = "faker" +version = "18.11.2" +description = "Faker is a Python package that generates fake data for you." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Faker-18.11.2-py3-none-any.whl", hash = "sha256:21c2c29638e98502f3bba9ad6a4f07a4b09c5e2150bb491ff02411a5888f6955"}, + {file = "Faker-18.11.2.tar.gz", hash = "sha256:ec6e2824bb1d3546b36c156324b9df6bca5a3d6d03adf991e6a5586756dcab9d"}, +] + +[package.dependencies] +python-dateutil = ">=2.4" + +[[package]] +name = "fancycompleter" +version = "0.9.1" +description = "colorful TAB completion for Python prompt" +optional = false +python-versions = "*" +files = [ + {file = "fancycompleter-0.9.1-py3-none-any.whl", hash = "sha256:dd076bca7d9d524cc7f25ec8f35ef95388ffef9ef46def4d3d25e9b044ad7080"}, + {file = "fancycompleter-0.9.1.tar.gz", hash = "sha256:09e0feb8ae242abdfd7ef2ba55069a46f011814a80fe5476be48f51b00247272"}, +] + +[package.dependencies] +pyreadline = {version = "*", markers = "platform_system == \"Windows\""} +pyrepl = ">=0.8.2" + +[[package]] +name = "fastdiff" +version = "0.3.0" +description = "A fast native implementation of diff algorithm with a pure python fallback" 
+optional = false +python-versions = "*" +files = [ + {file = "fastdiff-0.3.0-py2.py3-none-any.whl", hash = "sha256:ca5f61f6ddf5a1564ddfd98132ad28e7abe4a88a638a8b014a2214f71e5918ec"}, + {file = "fastdiff-0.3.0.tar.gz", hash = "sha256:4dfa09c47832a8c040acda3f1f55fc0ab4d666f0e14e6951e6da78d59acd945a"}, +] + +[package.dependencies] +wasmer = ">=1.0.0" +wasmer-compiler-cranelift = ">=1.0.0" + +[[package]] +name = "flake8" +version = "6.0.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, + {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.10.0,<2.11.0" +pyflakes = ">=3.0.0,<3.1.0" + +[[package]] +name = "flake8-blind-except" +version = "0.2.1" +description = "A flake8 extension that checks for blind except: statements" +optional = false +python-versions = "*" +files = [ + {file = "flake8-blind-except-0.2.1.tar.gz", hash = "sha256:f25a575a9dcb3eeb3c760bf9c22db60b8b5a23120224ed1faa9a43f75dd7dd16"}, +] + +[[package]] +name = "flake8-debugger" +version = "4.1.2" +description = "ipdb/pdb statement checker plugin for flake8" +optional = false +python-versions = ">=3.7" +files = [ + {file = "flake8-debugger-4.1.2.tar.gz", hash = "sha256:52b002560941e36d9bf806fca2523dc7fb8560a295d5f1a6e15ac2ded7a73840"}, + {file = "flake8_debugger-4.1.2-py3-none-any.whl", hash = "sha256:0a5e55aeddcc81da631ad9c8c366e7318998f83ff00985a49e6b3ecf61e571bf"}, +] + +[package.dependencies] +flake8 = ">=3.0" +pycodestyle = "*" + +[[package]] +name = "flake8-deprecated" +version = "2.0.1" +description = "Warns about deprecated method calls." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "flake8-deprecated-2.0.1.tar.gz", hash = "sha256:c7659a530aa76c3ad8be0c1e8331ed56d882ef8bfba074501a545bb3352b0c23"}, + {file = "flake8_deprecated-2.0.1-py3-none-any.whl", hash = "sha256:8c61d2cb8d487118b6c20392b25f08ba1ec49c759e4ea562c7a60172912bc7ee"}, +] + +[package.dependencies] +flake8 = "*" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "flake8-docstrings" +version = "1.7.0" +description = "Extension for flake8 which uses pydocstyle to check docstrings" +optional = false +python-versions = ">=3.7" +files = [ + {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, + {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, +] + +[package.dependencies] +flake8 = ">=3" +pydocstyle = ">=2.1" + +[[package]] +name = "flake8-isort" +version = "6.0.0" +description = "flake8 plugin that integrates isort ." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "flake8-isort-6.0.0.tar.gz", hash = "sha256:537f453a660d7e903f602ecfa36136b140de279df58d02eb1b6a0c84e83c528c"}, + {file = "flake8_isort-6.0.0-py3-none-any.whl", hash = "sha256:aa0cac02a62c7739e370ce6b9c31743edac904bae4b157274511fc8a19c75bbc"}, +] + +[package.dependencies] +flake8 = "*" +isort = ">=5.0.0,<6" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "flake8-string-format" +version = "0.3.0" +description = "string format checker, plugin for flake8" +optional = false +python-versions = "*" +files = [ + {file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"}, + {file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"}, +] + +[package.dependencies] +flake8 = "*" + +[[package]] +name = "freezegun" +version = "1.2.2" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.6" +files = [ + {file = "freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"}, + {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.7.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = 
"importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, + {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "inflection" +version = "0.5.1" +description = "A port of Ruby on Rails inflector to Python" +optional = false +python-versions = ">=3.5" +files = [ + {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, + {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "ipdb" +version = "0.13.13" +description = "IPython-enabled pdb" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "ipdb-0.13.13-py3-none-any.whl", hash = "sha256:45529994741c4ab6d2388bfa5d7b725c2cf7fe9deffabdb8a6113aa5ed449ed4"}, + {file = "ipdb-0.13.13.tar.gz", hash = "sha256:e3ac6018ef05126d442af680aad863006ec19d02290561ac88b8b1c0b0cfc726"}, +] + 
+[package.dependencies] +decorator = {version = "*", markers = "python_version > \"3.6\""} +ipython = {version = ">=7.31.1", markers = "python_version > \"3.6\""} +tomli = {version = "*", markers = "python_version > \"3.6\" and python_version < \"3.11\""} + +[[package]] +name = "ipython" +version = "8.14.0" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.9" +files = [ + {file = "ipython-8.14.0-py3-none-any.whl", hash = "sha256:248aca623f5c99a6635bc3857677b7320b9b8039f99f070ee0d20a5ca5a8e6bf"}, + {file = "ipython-8.14.0.tar.gz", hash = "sha256:1d197b907b6ba441b692c48cf2a3a2de280dc0ac91a3405b39349a50272ca0a1"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} + +[package.extras] +all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +black = ["black"] +doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pytest (<7.1)", 
"pytest-asyncio", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] + +[[package]] +name = "isort" +version = "5.12.0" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] + +[package.extras] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + +[[package]] +name = "jedi" +version = "0.18.2" +description = "An autocompletion tool for Python that can be used for text editors." +optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, + {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, +] + +[package.dependencies] +parso = ">=0.8.0,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = 
["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "josepy" +version = "1.13.0" +description = "JOSE protocol implementation in Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "josepy-1.13.0-py2.py3-none-any.whl", hash = "sha256:6f64eb35186aaa1776b7a1768651b1c616cab7f9685f9660bffc6491074a5390"}, + {file = "josepy-1.13.0.tar.gz", hash = "sha256:8931daf38f8a4c85274a0e8b7cb25addfd8d1f28f9fb8fbed053dd51aec75dc9"}, +] + +[package.dependencies] +cryptography = ">=1.5" +PyOpenSSL = ">=0.13" +setuptools = ">=1.0" + +[package.extras] +dev = ["pytest", "tox"] +docs = ["Sphinx (>=1.0)", "sphinx-rtd-theme (>=1.0)"] +tests = ["coverage (>=4.0)", "flake8 (<4)", "isort", "mypy", "pytest (>=2.8.0)", "pytest-cov", "pytest-flake8 (>=0.5)", "types-pyOpenSSL", "types-pyRFC3339", "types-requests", "types-setuptools"] + +[[package]] +name = "lml" +version = "0.1.0" +description = "Load me later. A lazy plugin management system." +optional = false +python-versions = "*" +files = [ + {file = "lml-0.1.0-py2.py3-none-any.whl", hash = "sha256:ec06e850019942a485639c8c2a26bdb99eae24505bee7492b649df98a0bed101"}, + {file = "lml-0.1.0.tar.gz", hash = "sha256:57a085a29bb7991d70d41c6c3144c560a8e35b4c1030ffb36d85fa058773bcc5"}, +] + +[[package]] +name = "lxml" +version = "4.9.2" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" +files = [ + {file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a"}, + {file = "lxml-4.9.2-cp27-cp27m-win32.whl", hash = "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de"}, + {file = "lxml-4.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975"}, + {file = "lxml-4.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4"}, + {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7"}, + {file 
= "lxml-4.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184"}, + {file = "lxml-4.9.2-cp310-cp310-win32.whl", hash = "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda"}, + {file = "lxml-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1"}, + {file = "lxml-4.9.2-cp311-cp311-win32.whl", hash = "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33"}, + {file = "lxml-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd"}, + {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"}, + {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"}, + {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"}, + {file = 
"lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"}, + {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"}, + {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"}, + {file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"}, + {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"}, + {file = 
"lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b"}, + {file = "lxml-4.9.2-cp37-cp37m-win32.whl", hash = "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe"}, + {file = "lxml-4.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9"}, + {file = "lxml-4.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f"}, + {file = 
"lxml-4.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457"}, + {file = "lxml-4.9.2-cp38-cp38-win32.whl", hash = "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b"}, + {file = "lxml-4.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7"}, + {file = "lxml-4.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2"}, + {file = "lxml-4.9.2-cp39-cp39-win32.whl", hash = "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1"}, + {file = "lxml-4.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"}, + {file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=0.29.7)"] + +[[package]] +name = "matplotlib-inline" +version = "0.1.6" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.5" +files = [ + {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, + {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mozilla-django-oidc" +version = "3.0.0" +description = "A lightweight authentication and access management library for integration with OpenID Connect enabled authentication services." 
+optional = false +python-versions = "*" +files = [ + {file = "mozilla-django-oidc-3.0.0.tar.gz", hash = "sha256:a7d447af83cb5aa1671a24009b0ce6b2f0d259e9c58d8c88c7a8d0c27c05c04d"}, + {file = "mozilla_django_oidc-3.0.0-py2.py3-none-any.whl", hash = "sha256:f535eeddf03698ad9fd89dd87037828e9c7d503771acef21f0509f6cc42fc875"}, +] + +[package.dependencies] +cryptography = "*" +Django = ">=3.2" +josepy = "*" +requests = "*" + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "openpyxl" +version = "3.0.10" +description = "A Python library to read/write Excel 2010 xlsx/xlsm files" +optional = false +python-versions = ">=3.6" +files = [ + {file = "openpyxl-3.0.10-py2.py3-none-any.whl", hash = "sha256:0ab6d25d01799f97a9464630abacbb34aafecdcaa0ef3cba6d6b3499867d0355"}, + {file = "openpyxl-3.0.10.tar.gz", hash = "sha256:e47805627aebcf860edb4edf7987b1309c1b3632f3750538ed962bbcc3bd7449"}, +] + +[package.dependencies] +et-xmlfile = "*" + +[[package]] +name = "packaging" +version = "23.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] + +[[package]] +name = "parso" +version = "0.8.3" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = 
"sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + +[[package]] +name = "pathspec" +version = "0.11.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, + {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, +] + +[[package]] +name = "pdbpp" +version = "0.10.3" +description = "pdb++, a drop-in replacement for pdb" +optional = false +python-versions = "*" +files = [ + {file = "pdbpp-0.10.3-py2.py3-none-any.whl", hash = "sha256:79580568e33eb3d6f6b462b1187f53e10cd8e4538f7d31495c9181e2cf9665d1"}, + {file = "pdbpp-0.10.3.tar.gz", hash = "sha256:d9e43f4fda388eeb365f2887f4e7b66ac09dce9b6236b76f63616530e2f669f5"}, +] + +[package.dependencies] +fancycompleter = ">=0.8" +pygments = "*" +wmctrl = "*" + +[package.extras] +funcsigs = ["funcsigs"] +testing = ["funcsigs", "pytest"] + +[[package]] +name = "pexpect" +version = "4.8.0" +description = "Pexpect allows easy control of interactive console applications." 
+optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +optional = false +python-versions = "*" +files = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] + +[[package]] +name = "pika" +version = "1.1.0" +description = "Pika Python AMQP Client Library" +optional = false +python-versions = "*" +files = [ + {file = "pika-1.1.0-py2.py3-none-any.whl", hash = "sha256:4e1a1a6585a41b2341992ec32aadb7a919d649eb82904fd8e4a4e0871c8cf3af"}, + {file = "pika-1.1.0.tar.gz", hash = "sha256:9fa76ba4b65034b878b2b8de90ff8660a59d925b087c5bb88f8fdbb4b64a1dbf"}, +] + +[package.extras] +tornado = ["tornado"] +twisted = ["twisted"] + +[[package]] +name = "platformdirs" +version = "3.8.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.8.0-py3-none-any.whl", hash = "sha256:ca9ed98ce73076ba72e092b23d3c93ea6c4e186b3f1c3dad6edd98ff6ffcca2e"}, + {file = "platformdirs-3.8.0.tar.gz", hash = "sha256:b0cabcb11063d21a0b261d557acb0a9d2126350e63b70cdf7db6347baea456dc"}, +] + +[package.extras] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] + +[[package]] +name = "pluggy" +version = "1.2.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "prometheus-client" +version = "0.17.0" +description = "Python client for the Prometheus monitoring system." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "prometheus_client-0.17.0-py3-none-any.whl", hash = "sha256:a77b708cf083f4d1a3fb3ce5c95b4afa32b9c521ae363354a4a910204ea095ce"}, + {file = "prometheus_client-0.17.0.tar.gz", hash = "sha256:9c3b26f1535945e85b8934fb374678d263137b78ef85f305b1156c7c881cd11b"}, +] + +[package.extras] +twisted = ["twisted"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.38" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"}, + {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psycopg2-binary" +version = "2.9.6" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.6" +files = [ + {file = "psycopg2-binary-2.9.6.tar.gz", hash = "sha256:1f64dcfb8f6e0c014c7f55e51c9759f024f70ea572fbdef123f85318c297947c"}, + {file = "psycopg2_binary-2.9.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d26e0342183c762de3276cca7a530d574d4e25121ca7d6e4a98e4f05cb8e4df7"}, + {file = "psycopg2_binary-2.9.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c48d8f2db17f27d41fb0e2ecd703ea41984ee19362cbce52c097963b3a1b4365"}, + {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffe9dc0a884a8848075e576c1de0290d85a533a9f6e9c4e564f19adf8f6e54a7"}, + {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a76e027f87753f9bd1ab5f7c9cb8c7628d1077ef927f5e2446477153a602f2c"}, + {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:6460c7a99fc939b849431f1e73e013d54aa54293f30f1109019c56a0b2b2ec2f"}, + {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae102a98c547ee2288637af07393dd33f440c25e5cd79556b04e3fca13325e5f"}, + {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9972aad21f965599ed0106f65334230ce826e5ae69fda7cbd688d24fa922415e"}, + {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7a40c00dbe17c0af5bdd55aafd6ff6679f94a9be9513a4c7e071baf3d7d22a70"}, + {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:cacbdc5839bdff804dfebc058fe25684cae322987f7a38b0168bc1b2df703fb1"}, + {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7f0438fa20fb6c7e202863e0d5ab02c246d35efb1d164e052f2f3bfe2b152bd0"}, + {file = "psycopg2_binary-2.9.6-cp310-cp310-win32.whl", hash = "sha256:b6c8288bb8a84b47e07013bb4850f50538aa913d487579e1921724631d02ea1b"}, + {file = "psycopg2_binary-2.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:61b047a0537bbc3afae10f134dc6393823882eb263088c271331602b672e52e9"}, + {file = "psycopg2_binary-2.9.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:964b4dfb7c1c1965ac4c1978b0f755cc4bd698e8aa2b7667c575fb5f04ebe06b"}, + {file = "psycopg2_binary-2.9.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afe64e9b8ea66866a771996f6ff14447e8082ea26e675a295ad3bdbffdd72afb"}, + {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15e2ee79e7cf29582ef770de7dab3d286431b01c3bb598f8e05e09601b890081"}, + {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfa74c903a3c1f0d9b1c7e7b53ed2d929a4910e272add6700c38f365a6002820"}, + {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b83456c2d4979e08ff56180a76429263ea254c3f6552cd14ada95cff1dec9bb8"}, + 
{file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0645376d399bfd64da57148694d78e1f431b1e1ee1054872a5713125681cf1be"}, + {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e99e34c82309dd78959ba3c1590975b5d3c862d6f279f843d47d26ff89d7d7e1"}, + {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4ea29fc3ad9d91162c52b578f211ff1c931d8a38e1f58e684c45aa470adf19e2"}, + {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4ac30da8b4f57187dbf449294d23b808f8f53cad6b1fc3623fa8a6c11d176dd0"}, + {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e78e6e2a00c223e164c417628572a90093c031ed724492c763721c2e0bc2a8df"}, + {file = "psycopg2_binary-2.9.6-cp311-cp311-win32.whl", hash = "sha256:1876843d8e31c89c399e31b97d4b9725a3575bb9c2af92038464231ec40f9edb"}, + {file = "psycopg2_binary-2.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:b4b24f75d16a89cc6b4cdff0eb6a910a966ecd476d1e73f7ce5985ff1328e9a6"}, + {file = "psycopg2_binary-2.9.6-cp36-cp36m-win32.whl", hash = "sha256:498807b927ca2510baea1b05cc91d7da4718a0f53cb766c154c417a39f1820a0"}, + {file = "psycopg2_binary-2.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0d236c2825fa656a2d98bbb0e52370a2e852e5a0ec45fc4f402977313329174d"}, + {file = "psycopg2_binary-2.9.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:34b9ccdf210cbbb1303c7c4db2905fa0319391bd5904d32689e6dd5c963d2ea8"}, + {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84d2222e61f313c4848ff05353653bf5f5cf6ce34df540e4274516880d9c3763"}, + {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30637a20623e2a2eacc420059be11527f4458ef54352d870b8181a4c3020ae6b"}, + {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8122cfc7cae0da9a3077216528b8bb3629c43b25053284cc868744bfe71eb141"}, + {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38601cbbfe600362c43714482f43b7c110b20cb0f8172422c616b09b85a750c5"}, + {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c7e62ab8b332147a7593a385d4f368874d5fe4ad4e341770d4983442d89603e3"}, + {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2ab652e729ff4ad76d400df2624d223d6e265ef81bb8aa17fbd63607878ecbee"}, + {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c83a74b68270028dc8ee74d38ecfaf9c90eed23c8959fca95bd703d25b82c88e"}, + {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d4e6036decf4b72d6425d5b29bbd3e8f0ff1059cda7ac7b96d6ac5ed34ffbacd"}, + {file = "psycopg2_binary-2.9.6-cp37-cp37m-win32.whl", hash = "sha256:a8c28fd40a4226b4a84bdf2d2b5b37d2c7bd49486b5adcc200e8c7ec991dfa7e"}, + {file = "psycopg2_binary-2.9.6-cp37-cp37m-win_amd64.whl", hash = "sha256:51537e3d299be0db9137b321dfb6a5022caaab275775680e0c3d281feefaca6b"}, + {file = "psycopg2_binary-2.9.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cf4499e0a83b7b7edcb8dabecbd8501d0d3a5ef66457200f77bde3d210d5debb"}, + {file = "psycopg2_binary-2.9.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7e13a5a2c01151f1208d5207e42f33ba86d561b7a89fca67c700b9486a06d0e2"}, + {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e0f754d27fddcfd74006455b6e04e6705d6c31a612ec69ddc040a5468e44b4e"}, + {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d57c3fd55d9058645d26ae37d76e61156a27722097229d32a9e73ed54819982a"}, + {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71f14375d6f73b62800530b581aed3ada394039877818b2d5f7fc77e3bb6894d"}, + {file = 
"psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:441cc2f8869a4f0f4bb408475e5ae0ee1f3b55b33f350406150277f7f35384fc"}, + {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:65bee1e49fa6f9cf327ce0e01c4c10f39165ee76d35c846ade7cb0ec6683e303"}, + {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:af335bac6b666cc6aea16f11d486c3b794029d9df029967f9938a4bed59b6a19"}, + {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cfec476887aa231b8548ece2e06d28edc87c1397ebd83922299af2e051cf2827"}, + {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:65c07febd1936d63bfde78948b76cd4c2a411572a44ac50719ead41947d0f26b"}, + {file = "psycopg2_binary-2.9.6-cp38-cp38-win32.whl", hash = "sha256:4dfb4be774c4436a4526d0c554af0cc2e02082c38303852a36f6456ece7b3503"}, + {file = "psycopg2_binary-2.9.6-cp38-cp38-win_amd64.whl", hash = "sha256:02c6e3cf3439e213e4ee930308dc122d6fb4d4bea9aef4a12535fbd605d1a2fe"}, + {file = "psycopg2_binary-2.9.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e9182eb20f41417ea1dd8e8f7888c4d7c6e805f8a7c98c1081778a3da2bee3e4"}, + {file = "psycopg2_binary-2.9.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8a6979cf527e2603d349a91060f428bcb135aea2be3201dff794813256c274f1"}, + {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8338a271cb71d8da40b023a35d9c1e919eba6cbd8fa20a54b748a332c355d896"}, + {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3ed340d2b858d6e6fb5083f87c09996506af483227735de6964a6100b4e6a54"}, + {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f81e65376e52f03422e1fb475c9514185669943798ed019ac50410fb4c4df232"}, + {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bfb13af3c5dd3a9588000910178de17010ebcccd37b4f9794b00595e3a8ddad3"}, + {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4c727b597c6444a16e9119386b59388f8a424223302d0c06c676ec8b4bc1f963"}, + {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4d67fbdaf177da06374473ef6f7ed8cc0a9dc640b01abfe9e8a2ccb1b1402c1f"}, + {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0892ef645c2fabb0c75ec32d79f4252542d0caec1d5d949630e7d242ca4681a3"}, + {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:02c0f3757a4300cf379eb49f543fb7ac527fb00144d39246ee40e1df684ab514"}, + {file = "psycopg2_binary-2.9.6-cp39-cp39-win32.whl", hash = "sha256:c3dba7dab16709a33a847e5cd756767271697041fbe3fe97c215b1fc1f5c9848"}, + {file = "psycopg2_binary-2.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:f6a88f384335bb27812293fdb11ac6aee2ca3f51d3c7820fe03de0a304ab6249"}, +] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "py-moneyed" +version = "2.0" +description = "Provides Currency and Money classes for use in your Python code." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "py-moneyed-2.0.tar.gz", hash = "sha256:a56e1987deacb2e0eac5904552699a5d3fa251042e528bf2ff74a72359f5e5b3"}, + {file = "py_moneyed-2.0-py3-none-any.whl", hash = "sha256:1fafe552cfa3cba579d026924c27b070d71b4140e50ef4535e4083b3f4f2473f"}, +] + +[package.dependencies] +babel = ">=2.8.0" +typing-extensions = ">=3.7.4.3" + +[package.extras] +tests = ["pytest (>=2.3.0)", "tox (>=1.6.0)"] +type-tests = ["mypy (>=0.812)", "pytest (>=2.3.0)", "pytest-mypy-plugins"] + +[[package]] +name = "pycodestyle" +version = "2.10.0" +description = "Python style guide checker" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, + {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, +] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pydocstyle" +version = "6.3.0" +description = "Python docstring style checker" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, + {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, +] + +[package.dependencies] +snowballstemmer = ">=2.2.0" + +[package.extras] +toml = ["tomli (>=1.2.3)"] + +[[package]] +name = "pyexcel" +version = "0.7.0" +description = "A wrapper library that provides one 
API to read, manipulate and writedata in different excel formats" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyexcel-0.7.0-py2.py3-none-any.whl", hash = "sha256:ddc6904512bfa2ecda509fb3b58229bb30db14498632fd9e7a5ba7bbfb02ed1b"}, + {file = "pyexcel-0.7.0.tar.gz", hash = "sha256:fbf0eee5d93b96cef6f19a9f00703f22c0a64f19728d91b95428009a52129709"}, +] + +[package.dependencies] +chardet = "*" +lml = ">=0.0.4" +pyexcel-io = ">=0.6.2" +texttable = ">=0.8.2" + +[package.extras] +ods = ["pyexcel-ods3 (>=0.6.0)"] +xls = ["pyexcel-xls (>=0.6.0)"] +xlsx = ["pyexcel-xlsx (>=0.6.0)"] + +[[package]] +name = "pyexcel-ezodf" +version = "0.3.4" +description = "A Python package to create/manipulate OpenDocumentFormat files" +optional = false +python-versions = "*" +files = [ + {file = "pyexcel-ezodf-0.3.4.tar.gz", hash = "sha256:972eeea9b0e4bab60dfc5cdcb7378cc7ba5e070a0b7282746c0182c5de011ff1"}, + {file = "pyexcel_ezodf-0.3.4-py2.py3-none-any.whl", hash = "sha256:a74ac7636a015fff31d35c5350dc5ad347ba98ecb453de4dbcbb9a9168434e8c"}, +] + +[package.dependencies] +lxml = "*" + +[[package]] +name = "pyexcel-io" +version = "0.6.6" +description = "A python library to read and write structured data in csv, zipped csvformat and to/from databases" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyexcel-io-0.6.6.tar.gz", hash = "sha256:f6084bf1afa5fbf4c61cf7df44370fa513821af188b02e3e19b5efb66d8a969f"}, + {file = "pyexcel_io-0.6.6-py2.py3-none-any.whl", hash = "sha256:19ff1d599a8a6c0982e4181ef86aa50e1f8d231410fa7e0e204d62e37551c1d6"}, +] + +[package.dependencies] +lml = ">=0.0.4" + +[package.extras] +ods = ["pyexcel-ods3 (>=0.6.0)"] +xls = ["pyexcel-xls (>=0.6.0)"] +xlsx = ["pyexcel-xlsx (>=0.6.0)"] + +[[package]] +name = "pyexcel-ods3" +version = "0.6.1" +description = "A wrapper library to read, manipulate and write data in ods format" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyexcel-ods3-0.6.1.tar.gz", hash = 
"sha256:53740fc9bc6e91e43cdc0ee4f557bb3b252d8493d34f2c11d26a93c53cfebc2e"}, + {file = "pyexcel_ods3-0.6.1-py3-none-any.whl", hash = "sha256:ca61d139879349a5d4b0a241add6504474c59fa280d1804b76f56ee4ba30eb8b"}, +] + +[package.dependencies] +lxml = "*" +pyexcel-ezodf = ">=0.3.3" +pyexcel-io = ">=0.6.2" + +[[package]] +name = "pyexcel-webio" +version = "0.1.4" +description = "A generic request and response interface for pyexcel web extensions." +optional = false +python-versions = "*" +files = [ + {file = "pyexcel-webio-0.1.4.tar.gz", hash = "sha256:039538f1b35351f1632891dde29ef4d7fba744e217678ebb5a501336e28ca265"}, + {file = "pyexcel_webio-0.1.4-py2.py3-none-any.whl", hash = "sha256:3583cf7dcddb747520a8a90e93cf07b0584878b56b3c41c46d132b458a6cfd00"}, +] + +[package.dependencies] +pyexcel = ">=0.5.6" + +[[package]] +name = "pyexcel-xlsx" +version = "0.6.0" +description = "A wrapper library to read, manipulate and write data in xlsx and xlsmformat" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyexcel-xlsx-0.6.0.tar.gz", hash = "sha256:55754f764252461aca6871db203f4bd1370ec877828e305e6be1de5f9aa6a79d"}, + {file = "pyexcel_xlsx-0.6.0-py2.py3-none-any.whl", hash = "sha256:16530f96a77c97ebcba7941517d2756ac52d3ce2903d81eecd7f300778d5242a"}, +] + +[package.dependencies] +openpyxl = ">=2.6.1" +pyexcel-io = ">=0.6.2" + +[[package]] +name = "pyflakes" +version = "3.0.1" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, + {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, +] + +[[package]] +name = "pygments" +version = "2.15.1" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, + {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyopenssl" +version = "23.2.0" +description = "Python wrapper module around the OpenSSL library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyOpenSSL-23.2.0-py3-none-any.whl", hash = "sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2"}, + {file = "pyOpenSSL-23.2.0.tar.gz", hash = "sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac"}, +] + +[package.dependencies] +cryptography = ">=38.0.0,<40.0.0 || >40.0.0,<40.0.1 || >40.0.1,<42" + +[package.extras] +docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"] +test = ["flaky", "pretend", "pytest (>=3.0.1)"] + +[[package]] +name = "pyreadline" +version = "2.1" +description = "A python implmementation of GNU readline." 
+optional = false +python-versions = "*" +files = [ + {file = "pyreadline-2.1.zip", hash = "sha256:4530592fc2e85b25b1a9f79664433da09237c1a270e4d78ea5aa3a2c7229e2d1"}, +] + +[[package]] +name = "pyrepl" +version = "0.9.0" +description = "A library for building flexible command line interfaces" +optional = false +python-versions = "*" +files = [ + {file = "pyrepl-0.9.0.tar.gz", hash = "sha256:292570f34b5502e871bbb966d639474f2b57fbfcd3373c2d6a2f3d56e681a775"}, +] + +[[package]] +name = "pytest" +version = "7.4.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.0.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, + {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-django" +version = "4.5.2" +description = "A Django plugin for pytest." +optional = false +python-versions = ">=3.5" +files = [ + {file = "pytest-django-4.5.2.tar.gz", hash = "sha256:d9076f759bb7c36939dbdd5ae6633c18edfc2902d1a69fdbefd2426b970ce6c2"}, + {file = "pytest_django-4.5.2-py3-none-any.whl", hash = "sha256:c60834861933773109334fe5a53e83d1ef4828f2203a1d6a0fa9972f4f75ab3e"}, +] + +[package.dependencies] +pytest = ">=5.4.0" + +[package.extras] +docs = ["sphinx", "sphinx-rtd-theme"] +testing = ["Django", "django-configurations (>=2.0)"] + +[[package]] +name = "pytest-env" +version = "0.6.2" +description = "py.test plugin that allows you to add environment variables." +optional = false +python-versions = "*" +files = [ + {file = "pytest-env-0.6.2.tar.gz", hash = "sha256:7e94956aef7f2764f3c147d216ce066bf6c42948bb9e293169b1b1c880a580c2"}, +] + +[package.dependencies] +pytest = ">=2.6.0" + +[[package]] +name = "pytest-factoryboy" +version = "2.1.0" +description = "Factory Boy support for pytest." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-factoryboy-2.1.0.tar.gz", hash = "sha256:23bc562ab32cc39eddfbbbf70e618a1b30e834a4cfa451c4bedc36216f0a7b19"}, + {file = "pytest_factoryboy-2.1.0-py3-none-any.whl", hash = "sha256:10c02d2736cb52c7af28065db9617e7f50634e95eaa07eeb9a007026aa3dc0a8"}, +] + +[package.dependencies] +factory-boy = ">=2.10.0" +inflection = "*" +pytest = ">=4.6" + +[[package]] +name = "pytest-freezegun" +version = "0.4.2" +description = "Wrap tests with fixtures in freeze_time" +optional = false +python-versions = "*" +files = [ + {file = "pytest-freezegun-0.4.2.zip", hash = "sha256:19c82d5633751bf3ec92caa481fb5cffaac1787bd485f0df6436fd6242176949"}, + {file = "pytest_freezegun-0.4.2-py2.py3-none-any.whl", hash = "sha256:5318a6bfb8ba4b709c8471c94d0033113877b3ee02da5bfcd917c1889cde99a7"}, +] + +[package.dependencies] +freezegun = ">0.3" +pytest = ">=3.0.0" + +[[package]] +name = "pytest-mock" +version = "3.7.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-mock-3.7.0.tar.gz", hash = "sha256:5112bd92cc9f186ee96e1a92efc84969ea494939c3aead39c50f421c4cc69534"}, + {file = "pytest_mock-3.7.0-py3-none-any.whl", hash = "sha256:6cff27cec936bf81dc5ee87f07132b807bcda51106b5ec4b90a04331cba76231"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "pytest-randomly" +version = "3.12.0" +description = "Pytest plugin to randomly order tests and control random.seed." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-randomly-3.12.0.tar.gz", hash = "sha256:d60c2db71ac319aee0fc6c4110a7597d611a8b94a5590918bfa8583f00caccb2"}, + {file = "pytest_randomly-3.12.0-py3-none-any.whl", hash = "sha256:f4f2e803daf5d1ba036cc22bf4fe9dbbf99389ec56b00e5cba732fb5c1d07fdd"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} +pytest = "*" + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-redmine" +version = "2.4.0" +description = "Library for communicating with a Redmine project management application" +optional = false +python-versions = ">=3.7, <4" +files = [ + {file = "python-redmine-2.4.0.tar.gz", hash = "sha256:29e1c479e6bedc4b193f84dda25121a1a0fcc30969c7f0f6e729c5638749e9d8"}, + {file = "python_redmine-2.4.0-py3-none-any.whl", hash = "sha256:c9b6ee4465516c1794fe8038b98ddc8bf9d8caa3c0564cf2bc6ea7d4637a2d3a"}, +] + +[package.dependencies] +requests = ">=2.28.2" + +[[package]] +name = "pytz" +version = "2023.3" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, + {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-mock" +version = "1.9.3" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.9.3.tar.gz", hash = "sha256:8d72abe54546c1fc9696fa1516672f1031d72a55a1d66c85184f972a24ba0eba"}, + {file = "requests_mock-1.9.3-py2.py3-none-any.whl", hash = "sha256:0a2d38a117c08bb78939ec163522976ad59a6b7fdd82b709e23bb98004a44970"}, +] + +[package.dependencies] +requests = ">=2.3,<3" +six = "*" + +[package.extras] +fixture = ["fixtures"] +test = ["fixtures", "mock", "purl", "pytest", "sphinx", "testrepository (>=0.0.18)", "testtools"] + +[[package]] +name = "sentry-sdk" +version = "1.26.0" +description = "Python client for Sentry (https://sentry.io)" +optional = false +python-versions = "*" +files = [ + {file = "sentry-sdk-1.26.0.tar.gz", hash = "sha256:760e4fb6d01c994110507133e08ecd4bdf4d75ee4be77f296a3579796cf73134"}, + {file = "sentry_sdk-1.26.0-py2.py3-none-any.whl", hash = "sha256:0c9f858337ec3781cf4851972ef42bba8c9828aea116b0dbed8f38c5f9a1896c"}, +] + +[package.dependencies] +certifi = "*" +urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} + +[package.extras] +aiohttp = ["aiohttp (>=3.5)"] +arq = ["arq (>=0.23)"] +beam = ["apache-beam (>=2.12)"] +bottle = ["bottle (>=0.12.13)"] +celery = ["celery (>=3)"] +chalice = ["chalice (>=1.16.0)"] +django = ["django (>=1.8)"] +falcon = ["falcon (>=1.4)"] +fastapi = 
["fastapi (>=0.79.0)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] +grpcio = ["grpcio (>=1.21.1)"] +httpx = ["httpx (>=0.16.0)"] +huey = ["huey (>=2)"] +loguru = ["loguru (>=0.5)"] +opentelemetry = ["opentelemetry-distro (>=0.35b0)"] +pure-eval = ["asttokens", "executing", "pure-eval"] +pymongo = ["pymongo (>=3.1)"] +pyspark = ["pyspark (>=2.4.4)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] +rq = ["rq (>=0.6)"] +sanic = ["sanic (>=0.8)"] +sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] +starlite = ["starlite (>=1.48)"] +tornado = ["tornado (>=5)"] + +[[package]] +name = "setuptools" +version = "68.0.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, + {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", 
"virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "snapshottest" +version = "0.6.0" +description = "Snapshot testing for pytest, unittest, Django, and Nose" +optional = false +python-versions = "*" +files = [ + {file = "snapshottest-0.6.0-py2.py3-none-any.whl", hash = "sha256:9b177cffe0870c589df8ddbee0a770149c5474b251955bdbde58b7f32a4ec429"}, + {file = "snapshottest-0.6.0.tar.gz", hash = "sha256:bbcaf81d92d8e330042e5c928e13d9f035e99e91b314fe55fda949c2f17b653c"}, +] + +[package.dependencies] +fastdiff = ">=0.1.4,<1" +six = ">=1.10.0" +termcolor = "*" + +[package.extras] +nose = ["nose"] +pytest = ["pytest"] +test = ["django (>=1.10.6)", "nose", "pytest (>=4.6)", "pytest-cov", "six"] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "sqlparse" +version = "0.4.4" +description = "A non-validating SQL parser." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"}, + {file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"}, +] + +[package.extras] +dev = ["build", "flake8"] +doc = ["sphinx"] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "stack-data" +version = "0.6.2" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, + {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "termcolor" +version = "2.3.0" +description = "ANSI color formatting for output in terminal" +optional = false +python-versions = ">=3.7" +files = [ + {file = "termcolor-2.3.0-py3-none-any.whl", hash = "sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475"}, + {file = "termcolor-2.3.0.tar.gz", hash = "sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a"}, +] + +[package.extras] +tests = ["pytest", "pytest-cov"] + +[[package]] +name = "texttable" +version = "1.6.7" +description = "module to create simple ASCII tables" +optional = false +python-versions = "*" +files = [ + {file = "texttable-1.6.7-py2.py3-none-any.whl", hash = "sha256:b7b68139aa8a6339d2c320ca8b1dc42d13a7831a346b446cb9eb385f0c76310c"}, + {file = "texttable-1.6.7.tar.gz", hash = "sha256:290348fb67f7746931bcdfd55ac7584ecd4e5b0846ab164333f0794b121760f2"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A 
lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tornado" +version = "6.3.2" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +optional = false +python-versions = ">= 3.8" +files = [ + {file = "tornado-6.3.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:c367ab6c0393d71171123ca5515c61ff62fe09024fa6bf299cd1339dc9456829"}, + {file = "tornado-6.3.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b46a6ab20f5c7c1cb949c72c1994a4585d2eaa0be4853f50a03b5031e964fc7c"}, + {file = "tornado-6.3.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2de14066c4a38b4ecbbcd55c5cc4b5340eb04f1c5e81da7451ef555859c833f"}, + {file = "tornado-6.3.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05615096845cf50a895026f749195bf0b10b8909f9be672f50b0fe69cba368e4"}, + {file = "tornado-6.3.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b17b1cf5f8354efa3d37c6e28fdfd9c1c1e5122f2cb56dac121ac61baa47cbe"}, + {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:29e71c847a35f6e10ca3b5c2990a52ce38b233019d8e858b755ea6ce4dcdd19d"}, + {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:834ae7540ad3a83199a8da8f9f2d383e3c3d5130a328889e4cc991acc81e87a0"}, + {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6a0848f1aea0d196a7c4f6772197cbe2abc4266f836b0aac76947872cd29b411"}, + {file = "tornado-6.3.2-cp38-abi3-win32.whl", hash = "sha256:7efcbcc30b7c654eb6a8c9c9da787a851c18f8ccd4a5a3a95b05c7accfa068d2"}, + {file = 
"tornado-6.3.2-cp38-abi3-win_amd64.whl", hash = "sha256:0c325e66c8123c606eea33084976c832aa4e766b7dff8aedd7587ea44a604cdf"}, + {file = "tornado-6.3.2.tar.gz", hash = "sha256:4b927c4f19b71e627b13f3db2324e4ae660527143f9e1f2e2fb404f3a187e2ba"}, +] + +[[package]] +name = "traitlets" +version = "5.9.0" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.7" +files = [ + {file = "traitlets-5.9.0-py3-none-any.whl", hash = "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8"}, + {file = "traitlets-5.9.0.tar.gz", hash = "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] + +[[package]] +name = "typing-extensions" +version = "4.6.3" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.6.3-py3-none-any.whl", hash = "sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26"}, + {file = "typing_extensions-4.6.3.tar.gz", hash = "sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5"}, +] + +[[package]] +name = "urllib3" +version = "2.0.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"}, + {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wasmer" +version = "1.1.0" +description = "Python extension to run WebAssembly binaries" +optional = false +python-versions = "*" +files = [ + {file = "wasmer-1.1.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:c2af4b907ae2dabcac41e316e811d5937c93adf1f8b05c5d49427f8ce0f37630"}, + {file = "wasmer-1.1.0-cp310-cp310-manylinux_2_24_x86_64.whl", hash = "sha256:ab1ae980021e5ec0bf0c6cdd3b979b1d15a5f3eb2b8a32da8dcb1156e4a1e484"}, + {file = "wasmer-1.1.0-cp310-none-win_amd64.whl", hash = "sha256:d0d93aec6215893d33e803ef0a8d37bf948c585dd80ba0e23a83fafee820bc03"}, + {file = "wasmer-1.1.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:1e63d16bd6e2e2272d8721647831de5c537e0bb08002ee6d7abf167ec02d5178"}, + {file = "wasmer-1.1.0-cp37-cp37m-manylinux_2_24_x86_64.whl", hash = "sha256:85e6a5bf44853e8e6a12e947ee3412da9e84f7ce49fc165ba5dbd293e9c5c405"}, + {file = "wasmer-1.1.0-cp37-none-win_amd64.whl", hash = "sha256:a182a6eca9b46d895b4985fc822fab8da3d2f84fab74ca27e55a7430a7fcf336"}, + {file = "wasmer-1.1.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:214d9a3cfb577ea9449eb2b5f13adceae34c55365e4c3d930066beb86a7f67bc"}, + {file = "wasmer-1.1.0-cp38-cp38-manylinux_2_24_x86_64.whl", hash = "sha256:b9e5605552bd7d2bc6337519b176defe83bc69b98abf3caaaefa4f7ec231d18a"}, + {file = "wasmer-1.1.0-cp38-none-win_amd64.whl", hash = "sha256:20b5190112e2e94a8947967f2bc683c9685855d0f34130d8434c87a55216a3bd"}, 
+ {file = "wasmer-1.1.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:ee442f0970f40ec5e32011c92fd753fb2061da0faa13de13fafc730c31be34e3"}, + {file = "wasmer-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:aa112198b743cff2e391230436813fb4b244a24443e37866522b7197e3a034da"}, + {file = "wasmer-1.1.0-cp39-cp39-manylinux_2_24_x86_64.whl", hash = "sha256:c0b37117f6d3ff51ee96431c7d224d99799b08d174e30fcd0fcd7e2e3cb8140c"}, + {file = "wasmer-1.1.0-cp39-none-win_amd64.whl", hash = "sha256:a0a4730ec4907a4cb0d9d4a77ea2608c2c814f22a22b73fc80be0f110e014836"}, + {file = "wasmer-1.1.0-py3-none-any.whl", hash = "sha256:2caf8c67feae9cd4246421551036917811c446da4f27ad4c989521ef42751931"}, +] + +[[package]] +name = "wasmer-compiler-cranelift" +version = "1.1.0" +description = "The Cranelift compiler for the `wasmer` package (to compile WebAssembly module)" +optional = false +python-versions = "*" +files = [ + {file = "wasmer_compiler_cranelift-1.1.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:9869910179f39696a020edc5689f7759257ac1cce569a7a0fcf340c59788baad"}, + {file = "wasmer_compiler_cranelift-1.1.0-cp310-cp310-manylinux_2_24_x86_64.whl", hash = "sha256:405546ee864ac158a4107f374dfbb1c8d6cfb189829bdcd13050143a4bd98f28"}, + {file = "wasmer_compiler_cranelift-1.1.0-cp310-none-win_amd64.whl", hash = "sha256:bdf75af9ef082e6aeb752550f694273340ece970b65099e0746db0f972760d11"}, + {file = "wasmer_compiler_cranelift-1.1.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:7d9c782b7721789b16e303b7e70c59df370896dd62b77e2779e3a44b4e1aa20c"}, + {file = "wasmer_compiler_cranelift-1.1.0-cp37-cp37m-manylinux_2_24_x86_64.whl", hash = "sha256:ff7dd5bd69030b63521c24583bf0f5457cd2580237340b91ce35370f72a4a1cc"}, + {file = "wasmer_compiler_cranelift-1.1.0-cp37-none-win_amd64.whl", hash = "sha256:447285402e366a34667a674db70458c491acd6940b797c175c0b0027f48e64bb"}, + {file = "wasmer_compiler_cranelift-1.1.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = 
"sha256:55a524985179f6b7b88ac973e8fac5a2574d3b125a966fba75fedd5a2525e484"}, + {file = "wasmer_compiler_cranelift-1.1.0-cp38-cp38-manylinux_2_24_x86_64.whl", hash = "sha256:bd03db5a916ead51b442c66acad38847dfe127cf90b2019b1680f1920c4f8d06"}, + {file = "wasmer_compiler_cranelift-1.1.0-cp38-none-win_amd64.whl", hash = "sha256:157d87cbd1d04adbad55b50cb4bedc28e444caf74797fd96df17390667e58699"}, + {file = "wasmer_compiler_cranelift-1.1.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:ff25fc99ebafa04a6c271d08a90d17b927930e3019a2b333c7cfb48ba32c6f71"}, + {file = "wasmer_compiler_cranelift-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9697ae082317a56776df8ff7df8c922eac38488ef38d3478fe5f0ca144c185ab"}, + {file = "wasmer_compiler_cranelift-1.1.0-cp39-cp39-manylinux_2_24_x86_64.whl", hash = "sha256:2a4349b1ddd727bd46bc5ede741839dcfc5153c52f064a83998c4150d5d4a85c"}, + {file = "wasmer_compiler_cranelift-1.1.0-cp39-none-win_amd64.whl", hash = "sha256:32fe38614fccc933da77ee4372904a5fa9c12b859209a2e4048a8284c4c371f2"}, + {file = "wasmer_compiler_cranelift-1.1.0-py3-none-any.whl", hash = "sha256:200fea80609cfb088457327acf66d5aa61f4c4f66b5a71133ada960b534c7355"}, +] + +[[package]] +name = "wcwidth" +version = "0.2.6" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, + {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, +] + +[[package]] +name = "whitenoise" +version = "6.5.0" +description = "Radically simplified static file serving for WSGI applications" +optional = false +python-versions = ">=3.7" +files = [ + {file = "whitenoise-6.5.0-py3-none-any.whl", hash = "sha256:16468e9ad2189f09f4a8c635a9031cc9bb2cdbc8e5e53365407acf99f7ade9ec"}, + {file = "whitenoise-6.5.0.tar.gz", hash = 
"sha256:15fe60546ac975b58e357ccaeb165a4ca2d0ab697e48450b8f0307ca368195a8"}, +] + +[package.extras] +brotli = ["Brotli"] + +[[package]] +name = "wmctrl" +version = "0.4" +description = "A tool to programmatically control windows inside X" +optional = false +python-versions = "*" +files = [ + {file = "wmctrl-0.4.tar.gz", hash = "sha256:66cbff72b0ca06a22ec3883ac3a4d7c41078bdae4fb7310f52951769b10e14e0"}, +] + +[[package]] +name = "zipp" +version = "3.15.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9" +content-hash = "4c8cc15797080b8131b35c7f79381793e76714ef0db342e5e84804b76873bb71" diff --git a/backend/pyproject.toml b/backend/pyproject.toml new file mode 100644 index 000000000..2f4287723 --- /dev/null +++ b/backend/pyproject.toml @@ -0,0 +1,134 @@ +[tool.poetry] +name = "timed-backend" +version = "3.0.7" +description = "Timetracking software" +repository = "https://github.com/adfinis/timed-backend" +authors = ["Adfinis AG"] +license = "AGPL-3.0" +readme = "README.md" +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Environment :: Console", + "Intended Audience :: Developers", + "Intended Audience :: Information Technology", + "License :: OSI Approved :: GNU Affero General Public 
License v3", + "Natural Language :: English", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3.8", +] +include = ["CHANGELOG.md"] + +[tool.poetry.dependencies] +python = "^3.9" +python-dateutil = "^2.8.2" +django = "^3.2.19" +# might remove this once we find out how the jsonapi extras_require work +django-cors-headers = "^4.1.0" +django-filter = "^23.2" +django-multiselectfield = "^0.1.12" +django-prometheus = "^2.3.1" +djangorestframework = "^3.14.0" +djangorestframework-jsonapi = "^6.0.0" +mozilla-django-oidc = "^3.0.0" +psycopg2-binary = "^2.9.3" +pytz = "^2023.3" +pyexcel-webio = "^0.1.4" +pyexcel-io = "^0.6.6" +django-excel = "^0.0.10" +django-nested-inline = "^0.4.5" +pyexcel-ods3 = "^0.6.1" +pyexcel-xlsx = "^0.6.0" +pyexcel-ezodf = "^0.3.4" +django-environ = "^0.10.0" +django-money = "^3.1.0" +python-redmine = "^2.4.0" +sentry-sdk = "^1.26.0" +whitenoise = "^6.5.0" +django-hurricane = "^1.3.4" +openpyxl = "3.0.10" # TODO: dependency of `pyexcel-xlsx` Remove as soon as https://github.com/pyexcel/pyexcel-xlsx/issues/52 is resolved. 
+ +[tool.poetry.dev-dependencies] +black = "23.3.0" +coverage = "7.2.7" +django-extensions = "3.2.3" +factory-boy = "3.2.1" +flake8 = "6.0.0" +flake8-blind-except = "0.2.1" +flake8-debugger = "4.1.2" +flake8-deprecated = "2.0.1" +flake8-docstrings = "1.7.0" +flake8-isort = "6.0.0" +flake8-string-format = "0.3.0" +ipdb = "0.13.13" +isort = "5.12.0" +pdbpp = "0.10.3" +pytest = "7.4.0" +pytest-cov = "4.0.0" +pytest-django = "4.5.2" +pytest-env = "0.6.2" +# needs to stay at 2.1.0 because of wrong interpretation of parameters with "__" +pytest-factoryboy = "2.1.0" +pytest-freezegun = "0.4.2" +pytest-mock = "3.7.0" +pytest-randomly = "3.12.0" +requests-mock = "1.9.3" +snapshottest = "0.6.0" + +[tool.isort] +skip = [ + "migrations", + "snapshots", +] +known_first_party = ["timed"] +known_third_party = ["pytest_factoryboy"] +multi_line_output = 3 +include_trailing_comma = true +force_grid_wrap = 0 +combine_as_imports = true +line_length = 88 + +[tool.pytest.ini_options] +DJANGO_SETTINGS_MODULE = "timed.settings" +addopts = "--reuse-db --randomly-seed=1521188767 --randomly-dont-reorganize" +env = [ + "DJANGO_OIDC_USERNAME_CLAIM=sub" +] +filterwarnings = [ + "error::DeprecationWarning", + "error::PendingDeprecationWarning", + "ignore:Using a non-boolean value for an isnull lookup is deprecated, use True or False instead.:django.utils.deprecation.RemovedInDjango40Warning", + # following is needed beceause of https://github.com/mozilla/mozilla-django-oidc/pull/371 + "ignore:distutils Version classes are deprecated:DeprecationWarning", # deprecation in pytest-freezegun + "ignore:django.conf.urls.url().*:django.utils.deprecation.RemovedInDjango40Warning", + "ignore:.*is deprecated in favour of new moneyed.l10n.format_money.*", + "ignore:.*invalid escape sequence.*", + "ignore:pkg_resources is deprecated as an API:DeprecationWarning", +] + +[tool.coverage.run] +source = ["."] + +[tool.coverage.report] +fail_under = 100 +exclude_lines = [ + "pragma: no cover", + "pragma: todo 
cover", + "def __str__", + "def __unicode__", + "def __repr__", +] +omit = [ + "*/migrations/*", + "*/apps.py", + "*/admin.py", + "manage.py", + "timed/settings_*.py", + "timed/wsgi.py", + "timed/forms.py", + "setup.py", +] +show_missing = true + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/backend/timed/.gitignore b/backend/timed/.gitignore new file mode 100644 index 000000000..2fa7ce7c4 --- /dev/null +++ b/backend/timed/.gitignore @@ -0,0 +1 @@ +config.ini diff --git a/backend/timed/__init__.py b/backend/timed/__init__.py new file mode 100644 index 000000000..8c0d5d5bb --- /dev/null +++ b/backend/timed/__init__.py @@ -0,0 +1 @@ +__version__ = "2.0.0" diff --git a/backend/timed/admin.py b/backend/timed/admin.py new file mode 100644 index 000000000..0fedc4fc5 --- /dev/null +++ b/backend/timed/admin.py @@ -0,0 +1,17 @@ +from django.conf import settings +from django.contrib.admin import AdminSite +from django.views.decorators.cache import never_cache + + +class TimedAdminSite(AdminSite): + login_template = "login.html" + + @never_cache + def login(self, request, extra_context=None): + extra = {"show_local_login": settings.ALLOW_LOCAL_LOGIN} + + if isinstance(extra_context, dict): + extra_context.update(extra) + else: + extra_context = extra + return super().login(request, extra_context) diff --git a/backend/timed/apps.py b/backend/timed/apps.py new file mode 100644 index 000000000..88cdfd479 --- /dev/null +++ b/backend/timed/apps.py @@ -0,0 +1,10 @@ +from django.contrib.admin.apps import AdminConfig + + +class TimedAdminConfig(AdminConfig): + """Overrides the default django.contrib.admin.site. + + This makes it possible to customize the login page. 
+ """ + + default_site = "timed.admin.TimedAdminSite" diff --git a/backend/timed/authentication.py b/backend/timed/authentication.py new file mode 100644 index 000000000..e5f7dac71 --- /dev/null +++ b/backend/timed/authentication.py @@ -0,0 +1,123 @@ +import base64 +import functools +import hashlib + +import requests +from django.conf import settings +from django.core.cache import cache +from django.core.exceptions import SuspiciousOperation +from django.utils.encoding import force_bytes +from mozilla_django_oidc.auth import LOGGER, OIDCAuthenticationBackend +from rest_framework.exceptions import AuthenticationFailed + + +class TimedOIDCAuthenticationBackend(OIDCAuthenticationBackend): + def get_introspection(self, access_token, id_token, payload): + """Return user details dictionary.""" + + basic = base64.b64encode( + f"{settings.OIDC_RP_INTROSPECT_CLIENT_ID}:{settings.OIDC_RP_INTROSPECT_CLIENT_SECRET}".encode( + "utf-8" + ) + ).decode() + headers = { + "Authorization": f"Basic {basic}", + "Content-Type": "application/x-www-form-urlencoded", + } + response = requests.post( + settings.OIDC_OP_INTROSPECT_ENDPOINT, + verify=settings.OIDC_VERIFY_SSL, + headers=headers, + data={"token": access_token}, + ) + response.raise_for_status() + return response.json() + + def get_userinfo_or_introspection(self, access_token): + try: + claims = self.cached_request( + self.get_userinfo, access_token, "auth.userinfo" + ) + return claims + except requests.HTTPError as e: + if e.response.status_code not in [401, 403]: + raise e + if settings.OIDC_CHECK_INTROSPECT: + try: + # check introspection if userinfo fails (confidential client) + claims = self.cached_request( + self.get_introspection, access_token, "auth.introspection" + ) + if "client_id" not in claims: + raise SuspiciousOperation( + "client_id not present in introspection" + ) + return claims + except requests.HTTPError as e: + # if the authorization fails it's not a valid client or + # the token is expired and permission is 
denied. + # Handing on the 401 Client Error would be transformed into + # a 500 by Django's exception handling. But that's not what we want. + if e.response.status_code not in [401, 403]: # pragma: no cover + raise e + raise AuthenticationFailed() + + def get_or_create_user(self, access_token, id_token, payload): + """Verify claims and return user, otherwise raise an Exception.""" + + claims = self.get_userinfo_or_introspection(access_token) + + users = self.filter_users_by_claims(claims) + + if len(users) == 1: + user = users.get() + self.update_user_from_claims(user, claims) + return user + elif settings.OIDC_CREATE_USER: + return self.create_user(claims) + else: + LOGGER.debug( + "Login failed: No user with username %s found, and " + "OIDC_CREATE_USER is False", + self.get_username(claims), + ) + return None + + def update_user_from_claims(self, user, claims): + user.email = claims.get(settings.OIDC_EMAIL_CLAIM, "") + user.first_name = claims.get(settings.OIDC_FIRSTNAME_CLAIM, "") + user.last_name = claims.get(settings.OIDC_LASTNAME_CLAIM, "") + user.save() + + def filter_users_by_claims(self, claims): + username = self.get_username(claims) + return self.UserModel.objects.filter(username__iexact=username) + + def cached_request(self, method, token, cache_prefix): + token_hash = hashlib.sha256(force_bytes(token)).hexdigest() + + func = functools.partial(method, token, None, None) + + return cache.get_or_set( + f"{cache_prefix}.{token_hash}", + func, + timeout=settings.OIDC_BEARER_TOKEN_REVALIDATION_TIME, + ) + + def create_user(self, claims): + """Return object for a newly created user account.""" + + username = self.get_username(claims) + email = claims.get(settings.OIDC_EMAIL_CLAIM, "") + first_name = claims.get(settings.OIDC_FIRSTNAME_CLAIM, "") + last_name = claims.get(settings.OIDC_LASTNAME_CLAIM, "") + + return self.UserModel.objects.create( + username=username, email=email, first_name=first_name, last_name=last_name + ) + + def get_username(self, claims): 
+ try: + return claims[settings.OIDC_USERNAME_CLAIM] + except KeyError: + raise SuspiciousOperation("Couldn't find username claim") diff --git a/backend/timed/conftest.py b/backend/timed/conftest.py new file mode 100644 index 000000000..725c82234 --- /dev/null +++ b/backend/timed/conftest.py @@ -0,0 +1,167 @@ +import inspect + +import pytest +from django.contrib.auth import get_user_model +from django.core.cache import cache +from factory.base import FactoryMetaClass +from pytest_factoryboy import register +from rest_framework.test import APIClient + +from timed.employment import factories as employment_factories +from timed.projects import factories as projects_factories +from timed.subscription import factories as subscription_factories +from timed.tracking import factories as tracking_factories + + +def register_module(module): + for name, obj in inspect.getmembers(module): + if isinstance(obj, FactoryMetaClass) and not obj._meta.abstract: + register(obj) + + +register_module(employment_factories) +register_module(projects_factories) +register_module(subscription_factories) +register_module(tracking_factories) + + +@pytest.fixture +def auth_user(db): + return get_user_model().objects.create_user( + username="user", + password="123qweasd", + first_name="Test", + last_name="User", + is_superuser=False, + is_staff=False, + ) + + +@pytest.fixture +def admin_user(db): + return get_user_model().objects.create_user( + username="admin", + password="123qweasd", + first_name="Admin", + last_name="User", + is_superuser=False, + is_staff=True, + ) + + +@pytest.fixture +def superadmin_user(db): + return get_user_model().objects.create_user( + username="superadmin", + password="123qweasd", + first_name="Superadmin", + last_name="User", + is_superuser=True, + is_staff=True, + ) + + +@pytest.fixture +def external_employee(db): + user = get_user_model().objects.create_user( + username="user", + password="123qweasd", + first_name="Test", + last_name="User", + is_superuser=False, 
+ is_staff=False, + ) + employment_factories.EmploymentFactory.create(user=user, is_external=True) + return user + + +@pytest.fixture +def internal_employee(db): + user = get_user_model().objects.create_user( + username="user", + password="123qweasd", + first_name="Test", + last_name="User", + email="test@example.com", + is_superuser=False, + is_staff=False, + ) + employment_factories.EmploymentFactory.create(user=user, is_external=False) + return user + + +@pytest.fixture +def client(): + return APIClient() + + +@pytest.fixture +def auth_client(auth_user): + """Return instance of a APIClient that is logged in as test user.""" + client = APIClient() + client.force_authenticate(user=auth_user) + client.user = auth_user + return client + + +@pytest.fixture +def admin_client(admin_user): + """Return instance of a APIClient that is logged in as a staff user.""" + client = APIClient() + client.force_authenticate(user=admin_user) + client.user = admin_user + return client + + +@pytest.fixture +def superadmin_client(superadmin_user): + """Return instance of a APIClient that is logged in as superuser.""" + client = APIClient() + client.force_authenticate(user=superadmin_user) + client.user = superadmin_user + return client + + +@pytest.fixture +def external_employee_client(external_employee): + """Return instance of a APIClient that is logged in as external test user.""" + client = APIClient() + client.force_authenticate(user=external_employee) + client.user = external_employee + return client + + +@pytest.fixture +def internal_employee_client(internal_employee): + """Return instance of a APIClient that is logged in as external test user.""" + client = APIClient() + client.force_authenticate(user=internal_employee) + client.user = internal_employee + return client + + +@pytest.fixture(scope="function", autouse=True) +def _autoclear_cache(): + cache.clear() + + +def setup_customer_and_employment_status( + user, is_assignee, is_customer, is_employed, is_external +): + """ + 
Set up customer and employment status. + + Return a 2-tuple of assignee and employment, if they + were created + """ + assignee = None + employment = None + if is_assignee: + assignee = projects_factories.CustomerAssigneeFactory.create( + user=user, is_customer=is_customer + ) + if is_employed: + employment = employment_factories.EmploymentFactory.create( + user=user, is_external=is_external + ) + return assignee, employment diff --git a/app/components/filter-sidebar/group/styles.scss b/backend/timed/employment/__init__.py similarity index 100% rename from app/components/filter-sidebar/group/styles.scss rename to backend/timed/employment/__init__.py diff --git a/backend/timed/employment/admin.py b/backend/timed/employment/admin.py new file mode 100644 index 000000000..436beae55 --- /dev/null +++ b/backend/timed/employment/admin.py @@ -0,0 +1,222 @@ +"""Views for the admin interface.""" + +import datetime + +from django import forms +from django.contrib import admin +from django.contrib.admin.widgets import AutocompleteSelect +from django.contrib.auth.admin import UserAdmin +from django.core.exceptions import ValidationError +from django.utils.translation import gettext_lazy as _ + +from timed.employment import models +from timed.forms import DurationInHoursField + +# do not allow deletion of objects site wide +# objects need to be deactivated resp. 
archived +admin.site.disable_action("delete_selected") + + +class SupervisorForm(forms.ModelForm): + """Custom form for the supervisor admin.""" + + # Change the label of the supervisor through table attribute to_user + to_user = forms.ModelChoiceField( + queryset=models.User.objects.all(), + label=_("supervised by"), + widget=AutocompleteSelect( + models.User.supervisors.through.to_user.field, admin_site=admin.site + ), + ) + + class Meta: + """Meta information for the supervisor form.""" + + fields = "__all__" + model = models.User.supervisors.through + + +class SuperviseeForm(forms.ModelForm): + """Custom form for the supervisee admin.""" + + # Change the label of the supervisor through table attribute from_user + from_user = forms.ModelChoiceField( + queryset=models.User.objects.all(), + label=_("supervising"), + widget=AutocompleteSelect( + models.User.supervisors.through.from_user.field, admin_site=admin.site + ), + ) + + class Meta: + """Meta information for the supervisee form.""" + + fields = "__all__" + model = models.User.supervisors.through + + +class SupervisorInline(admin.TabularInline): + autocomplete_fields = ["to_user"] + form = SupervisorForm + model = models.User.supervisors.through + extra = 0 + fk_name = "from_user" + verbose_name = _("Supervisor") + verbose_name_plural = _("Supervisors") + + +class SuperviseeInline(admin.TabularInline): + autocomplete_fields = ["from_user"] + form = SuperviseeForm + model = models.User.supervisors.through + extra = 0 + fk_name = "to_user" + verbose_name = _("Employee") + verbose_name_plural = _("Employees") + + +class EmploymentForm(forms.ModelForm): + """Custom form for the employment admin.""" + + worktime_per_day = DurationInHoursField(label=_("Worktime per day in hours")) + + def clean(self): + """Validate the employment as a whole. + + Ensure the end date is after the start date and there is only one + active employment per user and there are no overlapping employments. 
+ + :throws: django.core.exceptions.ValidationError + :return: The cleaned data + :rtype: dict + """ + data = super().clean() + + employments = models.Employment.objects.filter(user=data.get("user")) + + if self.instance: + employments = employments.exclude(id=self.instance.id) + + if data.get("end_date") and data.get("start_date") >= data.get("end_date"): + raise ValidationError(_("The end date must be after the start date")) + + if any( + [ + e.start_date <= (data.get("end_date") or datetime.date.today()) + and data.get("start_date") <= (e.end_date or datetime.date.today()) + for e in employments + ] + ): + raise ValidationError( + _("A user can't have multiple employments at the same time") + ) + + return data + + class Meta: + """Meta information for the employment form.""" + + fields = "__all__" + model = models.Employment + + +class EmploymentInline(admin.TabularInline): + form = EmploymentForm + model = models.Employment + extra = 0 + + +class OvertimeCreditForm(forms.ModelForm): + model = models.OvertimeCredit + duration = DurationInHoursField(label=_("Duration in hours")) + + +class OvertimeCreditInline(admin.TabularInline): + model = models.OvertimeCredit + form = OvertimeCreditForm + extra = 0 + + +class AbsenceCreditInline(admin.TabularInline): + model = models.AbsenceCredit + extra = 0 + + +@admin.register(models.User) +class UserAdmin(UserAdmin): + """Timed specific user admin.""" + + inlines = [ + SupervisorInline, + SuperviseeInline, + EmploymentInline, + OvertimeCreditInline, + AbsenceCreditInline, + ] + list_display = ("username", "first_name", "last_name", "is_staff", "is_active") + search_fields = ["username"] + + actions = [ + "disable_users", + "enable_users", + "disable_staff_status", + "enable_staff_status", + ] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.fieldsets += ( + (_("Extra fields"), {"fields": ["tour_done", "is_accountant"]}), + ) + + def disable_users(self, request, queryset): + 
queryset.update(is_active=False) + + disable_users.short_description = _("Disable selected users") + + def enable_users(self, request, queryset): + queryset.update(is_active=True) + + enable_users.short_description = _("Enable selected users") + + def disable_staff_status(self, request, queryset): + queryset.update(is_staff=False) + + disable_staff_status.short_description = _("Disable staff status of selected users") + + def enable_staff_status(self, request, queryset): + queryset.update(is_staff=True) + + enable_staff_status.short_description = _("Enable staff status of selected users") + + def has_delete_permission(self, request, obj=None): + return obj and not obj.reports.exists() + + +@admin.register(models.Location) +class LocationAdmin(admin.ModelAdmin): + """Location admin view.""" + + list_display = ["name"] + search_fields = ["name"] + + def has_delete_permission(self, request, obj=None): + return obj and not obj.employments.exists() + + +@admin.register(models.PublicHoliday) +class PublicHolidayAdmin(admin.ModelAdmin): + """Public holiday admin view.""" + + list_display = ["__str__", "date", "location"] + list_filter = ["location"] + + +@admin.register(models.AbsenceType) +class AbsenceTypeAdmin(admin.ModelAdmin): + """Absence type admin view.""" + + list_display = ["name"] + + def has_delete_permission(self, request, obj=None): + return obj and not obj.absences.exists() and not obj.absencecredit_set.exists() diff --git a/backend/timed/employment/apps.py b/backend/timed/employment/apps.py new file mode 100644 index 000000000..be5d2bfb0 --- /dev/null +++ b/backend/timed/employment/apps.py @@ -0,0 +1,10 @@ +"""Configuration for employment app.""" + +from django.apps import AppConfig + + +class EmploymentConfig(AppConfig): + """App configuration for employment app.""" + + name = "timed.employment" + label = "employment" diff --git a/backend/timed/employment/factories.py b/backend/timed/employment/factories.py new file mode 100644 index 000000000..2fd347ac7 
--- /dev/null +++ b/backend/timed/employment/factories.py @@ -0,0 +1,121 @@ +"""Factories for testing the tracking app.""" + +import datetime +import random + +from django.contrib.auth import get_user_model +from factory import Faker, SubFactory, lazy_attribute +from factory.django import DjangoModelFactory + +from timed.employment import models + + +class UserFactory(DjangoModelFactory): + """User factory.""" + + first_name = Faker("first_name") + last_name = Faker("last_name") + email = Faker("email") + password = Faker("password", length=12) + username = Faker("user_name") + + class Meta: + """Meta informations for the user factory.""" + + model = get_user_model() + + +class LocationFactory(DjangoModelFactory): + """Location factory.""" + + name = Faker("city") + + class Meta: + """Meta informations for the location factory.""" + + model = models.Location + + +class PublicHolidayFactory(DjangoModelFactory): + """Public holiday factory.""" + + name = Faker("word") + date = Faker("date_object") + location = SubFactory(LocationFactory) + + class Meta: + """Meta informations for the public holiday factory.""" + + model = models.PublicHoliday + + +class EmploymentFactory(DjangoModelFactory): + """Employment factory.""" + + user = SubFactory(UserFactory) + location = SubFactory(LocationFactory) + percentage = Faker("random_int", min=50, max=100) + start_date = Faker("date_object") + end_date = None + is_external = False + + @lazy_attribute + def worktime_per_day(self): + """Generate the worktime per day based on the percentage. 
+ + :return: The generated worktime + :rtype: datetime.timedelta + """ + return datetime.timedelta(minutes=60 * 8.5 * self.percentage / 100) + + class Meta: + """Meta informations for the employment factory.""" + + model = models.Employment + + +class AbsenceTypeFactory(DjangoModelFactory): + """Absence type factory.""" + + name = Faker("word") + fill_worktime = False + + class Meta: + """Meta informations for the absence type factory.""" + + model = models.AbsenceType + + +class AbsenceCreditFactory(DjangoModelFactory): + """Absence credit factory.""" + + absence_type = SubFactory(AbsenceTypeFactory) + user = SubFactory(UserFactory) + date = Faker("date_object") + days = Faker("random_int", min=1, max=25) + + class Meta: + """Meta informations for the absence credit factory.""" + + model = models.AbsenceCredit + + +class OvertimeCreditFactory(DjangoModelFactory): + """Overtime credit factory.""" + + user = SubFactory(UserFactory) + date = Faker("date_object") + + @lazy_attribute + def duration(self): + """Generate a random duration. 
class YearFilter(Filter):
    """Filter restricting a queryset to a single calendar year."""

    def filter(self, qs, value):
        """Apply a ``<field_name>__year`` lookup unless the value is empty.

        Empty values (per django-filter's ``EMPTY_VALUES``) leave the
        queryset untouched.
        """
        if value in EMPTY_VALUES:
            return qs

        lookup = "{}__year".format(self.field_name)
        return qs.filter(**{lookup: value})


class PublicHolidayFilterSet(FilterSet):
    """Filter set for the public holidays endpoint.

    Supports filtering by exact date, date range and calendar year.
    """

    year = YearFilter(field_name="date")
    from_date = DateFilter(field_name="date", lookup_expr="gte")
    to_date = DateFilter(field_name="date", lookup_expr="lte")

    class Meta:
        """Meta information for the public holiday filter set."""

        model = models.PublicHoliday
        fields = ["year", "location", "date", "from_date", "to_date"]


class AbsenceTypeFilterSet(FilterSet):
    """Filter set for the absence types endpoint."""

    fill_worktime = NumberFilter(field_name="fill_worktime")

    class Meta:
        """Meta information for the absence type filter set."""

        model = models.AbsenceType
        fields = ["fill_worktime"]
class EmploymentFilterSet(FilterSet):
    """Filter set for the employments endpoint."""

    date = DateFilter(method="filter_date")

    def filter_date(self, queryset, name, value):
        """Keep employments active on the given date.

        An employment matches when it started on or before ``value`` and
        either has no end date (open-ended) or ends on or after ``value``.
        """
        started = Q(start_date__lte=value)
        open_ended = Q(end_date__isnull=True)
        ends_after = Q(end_date__gte=value)
        return queryset.filter(started & (ends_after | open_ended))

    class Meta:
        model = models.Employment
        fields = ["user", "location"]


class OvertimeCreditFilterSet(FilterSet):
    """Filter set for the overtime credits endpoint."""

    year = YearFilter(field_name="date")
    from_date = DateFilter(field_name="date", lookup_expr="gte")
    to_date = DateFilter(field_name="date", lookup_expr="lte")

    class Meta:
        model = models.OvertimeCredit
        fields = ["year", "user", "date", "from_date", "to_date"]


class AbsenceCreditFilterSet(FilterSet):
    """Filter set for the absence credits endpoint."""

    year = YearFilter(field_name="date")
    from_date = DateFilter(field_name="date", lookup_expr="gte")
    to_date = DateFilter(field_name="date", lookup_expr="lte")

    class Meta:
        model = models.AbsenceCredit
        fields = ["year", "user", "date", "from_date", "to_date", "absence_type"]


class WorktimeBalanceFilterSet(FilterSet):
    """Filter set for the worktime balance endpoint."""

    user = NumberFilter(field_name="id")
    supervisor = NumberFilter(field_name="supervisors")
    # additional filters analyzed in WorktimeBalanceView
    # date = DateFilter()
    # last_reported_date = NumberFilter()

    class Meta:
        model = models.User
        fields = ["user"]
NumberFilter(field_name="id") + + class Meta: + model = models.AbsenceType + fields = ["absence_type"] diff --git a/backend/timed/employment/migrations/0001_initial.py b/backend/timed/employment/migrations/0001_initial.py new file mode 100644 index 000000000..10770c7bc --- /dev/null +++ b/backend/timed/employment/migrations/0001_initial.py @@ -0,0 +1,327 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.4 on 2017-08-17 09:16 +from __future__ import unicode_literals + +from django.conf import settings +import django.contrib.auth.validators +import django.core.validators +from django.db import migrations, models +import django.db.models.deletion +import django.utils.timezone +import timed.employment.models +import timed.models + + +class Migration(migrations.Migration): + initial = True + + dependencies = [("auth", "0008_alter_user_username_max_length")] + + operations = [ + migrations.CreateModel( + name="User", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("password", models.CharField(max_length=128, verbose_name="password")), + ( + "last_login", + models.DateTimeField( + blank=True, null=True, verbose_name="last login" + ), + ), + ( + "is_superuser", + models.BooleanField( + default=False, + help_text="Designates that this user has all permissions without explicitly assigning them.", + verbose_name="superuser status", + ), + ), + ( + "username", + models.CharField( + error_messages={ + "unique": "A user with that username already exists." + }, + help_text="Required. 150 characters or fewer. 
Letters, digits and @/./+/-/_ only.", + max_length=150, + unique=True, + validators=[ + django.contrib.auth.validators.UnicodeUsernameValidator() + ], + verbose_name="username", + ), + ), + ( + "first_name", + models.CharField( + blank=True, max_length=30, verbose_name="first name" + ), + ), + ( + "last_name", + models.CharField( + blank=True, max_length=30, verbose_name="last name" + ), + ), + ( + "email", + models.EmailField( + blank=True, max_length=254, verbose_name="email address" + ), + ), + ( + "is_staff", + models.BooleanField( + default=False, + help_text="Designates whether the user can log into this admin site.", + verbose_name="staff status", + ), + ), + ( + "is_active", + models.BooleanField( + default=True, + help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.", + verbose_name="active", + ), + ), + ( + "date_joined", + models.DateTimeField( + default=django.utils.timezone.now, verbose_name="date joined" + ), + ), + ( + "groups", + models.ManyToManyField( + blank=True, + help_text="The groups this user belongs to. 
A user will get all permissions granted to each of their groups.", + related_name="user_set", + related_query_name="user", + to="auth.Group", + verbose_name="groups", + ), + ), + ( + "supervisors", + models.ManyToManyField( + related_name="supervisees", to=settings.AUTH_USER_MODEL + ), + ), + ( + "user_permissions", + models.ManyToManyField( + blank=True, + help_text="Specific permissions for this user.", + related_name="user_set", + related_query_name="user", + to="auth.Permission", + verbose_name="user permissions", + ), + ), + ], + options={ + "abstract": False, + "verbose_name_plural": "users", + "verbose_name": "user", + }, + managers=[("objects", timed.employment.models.UserManager())], + ), + migrations.CreateModel( + name="AbsenceCredit", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("comment", models.CharField(blank=True, max_length=255)), + ("date", models.DateField()), + ("days", models.PositiveIntegerField(default=0)), + ], + ), + migrations.CreateModel( + name="AbsenceType", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=50)), + ("fill_worktime", models.BooleanField(default=False)), + ], + ), + migrations.CreateModel( + name="Employment", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "percentage", + models.IntegerField( + validators=[ + django.core.validators.MinValueValidator(0), + django.core.validators.MaxValueValidator(100), + ] + ), + ), + ("worktime_per_day", models.DurationField()), + ("start_date", models.DateField()), + ("end_date", models.DateField(blank=True, null=True)), + ], + ), + migrations.CreateModel( + name="Location", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + 
verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=50, unique=True)), + ( + "workdays", + timed.models.WeekdaysField( + choices=[ + (1, "Monday"), + (2, "Tuesday"), + (3, "Wednesday"), + (4, "Thursday"), + (5, "Friday"), + (6, "Saturday"), + (7, "Sunday"), + ], + default=["1", "2", "3", "4", "5"], + max_length=13, + ), + ), + ], + ), + migrations.CreateModel( + name="OvertimeCredit", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("date", models.DateField()), + ("duration", models.DurationField(blank=True, null=True)), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="overtime_credits", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + ), + migrations.CreateModel( + name="PublicHoliday", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=50)), + ("date", models.DateField()), + ( + "location", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="public_holidays", + to="employment.Location", + ), + ), + ], + ), + migrations.AddField( + model_name="employment", + name="location", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="employment.Location" + ), + ), + migrations.AddField( + model_name="employment", + name="user", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="employments", + to=settings.AUTH_USER_MODEL, + ), + ), + migrations.AddField( + model_name="absencecredit", + name="absence_type", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="absence_credits", + to="employment.AbsenceType", + ), + ), + migrations.AddField( + model_name="absencecredit", + name="user", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + 
related_name="absence_credits", + to=settings.AUTH_USER_MODEL, + ), + ), + migrations.CreateModel( + name="UserAbsenceType", + fields=[], + options={"indexes": [], "proxy": True}, + bases=("employment.absencetype",), + ), + migrations.AddIndex( + model_name="publicholiday", + index=models.Index(fields=["date"], name="employment__date_2d002c_idx"), + ), + migrations.AddIndex( + model_name="employment", + index=models.Index( + fields=["start_date", "end_date"], name="employment__start_d_74c274_idx" + ), + ), + ] diff --git a/backend/timed/employment/migrations/0002_auto_20170823_1051.py b/backend/timed/employment/migrations/0002_auto_20170823_1051.py new file mode 100644 index 000000000..e72625ce2 --- /dev/null +++ b/backend/timed/employment/migrations/0002_auto_20170823_1051.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.4 on 2017-08-23 08:51 +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [("employment", "0001_initial")] + + operations = [ + migrations.AlterField( + model_name="employment", + name="location", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="employments", + to="employment.Location", + ), + ) + ] diff --git a/backend/timed/employment/migrations/0003_user_tour_done.py b/backend/timed/employment/migrations/0003_user_tour_done.py new file mode 100644 index 000000000..ddf2c3ae8 --- /dev/null +++ b/backend/timed/employment/migrations/0003_user_tour_done.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.4 on 2017-08-25 07:28 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [("employment", "0002_auto_20170823_1051")] + + operations = [ + migrations.AddField( + model_name="user", + name="tour_done", + field=models.BooleanField(default=False), + 
) + ] diff --git a/backend/timed/employment/migrations/0004_auto_20170904_1510.py b/backend/timed/employment/migrations/0004_auto_20170904_1510.py new file mode 100644 index 000000000..49adf55f0 --- /dev/null +++ b/backend/timed/employment/migrations/0004_auto_20170904_1510.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.4 on 2017-09-04 13:10 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [("employment", "0003_user_tour_done")] + + operations = [ + migrations.AlterModelOptions( + name="absencetype", options={"ordering": ("name",)} + ), + migrations.AlterModelOptions(name="location", options={"ordering": ("name",)}), + migrations.AlterModelOptions( + name="publicholiday", options={"ordering": ("date",)} + ), + migrations.AlterField( + model_name="absencecredit", + name="days", + field=models.IntegerField(default=0), + ), + ] diff --git a/backend/timed/employment/migrations/0005_auto_20170906_1259.py b/backend/timed/employment/migrations/0005_auto_20170906_1259.py new file mode 100644 index 000000000..e630ee6f3 --- /dev/null +++ b/backend/timed/employment/migrations/0005_auto_20170906_1259.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.4 on 2017-09-06 10:59 +from __future__ import unicode_literals + +from django.db import migrations, models +import django.utils.timezone + + +class Migration(migrations.Migration): + dependencies = [("employment", "0004_auto_20170904_1510")] + + operations = [ + migrations.AddField( + model_name="employment", + name="added", + field=models.DateTimeField( + auto_now_add=True, default=django.utils.timezone.now + ), + preserve_default=False, + ), + migrations.AddField( + model_name="employment", + name="updated", + field=models.DateTimeField(auto_now=True), + ), + ] diff --git a/backend/timed/employment/migrations/0006_auto_20170906_1635.py 
b/backend/timed/employment/migrations/0006_auto_20170906_1635.py new file mode 100644 index 000000000..98ab8f8af --- /dev/null +++ b/backend/timed/employment/migrations/0006_auto_20170906_1635.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.4 on 2017-09-06 14:35 +from __future__ import unicode_literals + +import datetime +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [("employment", "0005_auto_20170906_1259")] + + operations = [ + migrations.AlterField( + model_name="overtimecredit", + name="duration", + field=models.DurationField(default=datetime.timedelta(0)), + ) + ] diff --git a/backend/timed/employment/migrations/0007_auto_20170911_0959.py b/backend/timed/employment/migrations/0007_auto_20170911_0959.py new file mode 100644 index 000000000..5739b4a7a --- /dev/null +++ b/backend/timed/employment/migrations/0007_auto_20170911_0959.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.5 on 2017-09-11 07:59 +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [("employment", "0006_auto_20170906_1635")] + + operations = [ + migrations.AlterField( + model_name="absencecredit", + name="absence_type", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="employment.AbsenceType" + ), + ) + ] diff --git a/backend/timed/employment/migrations/0008_auto_20171013_1041.py b/backend/timed/employment/migrations/0008_auto_20171013_1041.py new file mode 100644 index 000000000..453045186 --- /dev/null +++ b/backend/timed/employment/migrations/0008_auto_20171013_1041.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.6 on 2017-10-13 08:41 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [("employment", 
"0007_auto_20170911_0959")] + + operations = [ + migrations.AlterField( + model_name="user", + name="last_name", + field=models.CharField(max_length=30, verbose_name="last name"), + ) + ] diff --git a/backend/timed/employment/migrations/0009_delete_userabsencetype.py b/backend/timed/employment/migrations/0009_delete_userabsencetype.py new file mode 100644 index 000000000..1efbe7178 --- /dev/null +++ b/backend/timed/employment/migrations/0009_delete_userabsencetype.py @@ -0,0 +1,11 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.6 on 2017-10-25 08:50 +from __future__ import unicode_literals + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [("employment", "0008_auto_20171013_1041")] + + operations = [migrations.DeleteModel(name="UserAbsenceType")] diff --git a/backend/timed/employment/migrations/0010_overtimecredit_comment.py b/backend/timed/employment/migrations/0010_overtimecredit_comment.py new file mode 100644 index 000000000..7c945e661 --- /dev/null +++ b/backend/timed/employment/migrations/0010_overtimecredit_comment.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.6 on 2017-10-31 10:25 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [("employment", "0009_delete_userabsencetype")] + + operations = [ + migrations.AddField( + model_name="overtimecredit", + name="comment", + field=models.CharField(blank=True, max_length=255), + ) + ] diff --git a/backend/timed/employment/migrations/0011_auto_20171101_1227.py b/backend/timed/employment/migrations/0011_auto_20171101_1227.py new file mode 100644 index 000000000..7abff86be --- /dev/null +++ b/backend/timed/employment/migrations/0011_auto_20171101_1227.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.6 on 2017-11-01 11:27 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class 
Migration(migrations.Migration): + dependencies = [("employment", "0010_overtimecredit_comment")] + + operations = [ + migrations.AddField( + model_name="absencecredit", + name="transfer", + field=models.BooleanField(default=False), + ), + migrations.AddField( + model_name="overtimecredit", + name="transfer", + field=models.BooleanField(default=False), + ), + ] diff --git a/backend/timed/employment/migrations/0012_auto_20181026_1528.py b/backend/timed/employment/migrations/0012_auto_20181026_1528.py new file mode 100644 index 000000000..7428cda1f --- /dev/null +++ b/backend/timed/employment/migrations/0012_auto_20181026_1528.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.16 on 2018-10-26 13:28 +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [("employment", "0011_auto_20171101_1227")] + + operations = [ + migrations.AlterField( + model_name="absencecredit", + name="absence_type", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="employment.AbsenceType" + ), + ), + migrations.AlterField( + model_name="employment", + name="location", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + related_name="employments", + to="employment.Location", + ), + ), + ] diff --git a/backend/timed/employment/migrations/0013_auto_20210302_1136.py b/backend/timed/employment/migrations/0013_auto_20210302_1136.py new file mode 100644 index 000000000..0a6757a8e --- /dev/null +++ b/backend/timed/employment/migrations/0013_auto_20210302_1136.py @@ -0,0 +1,19 @@ +# Generated by Django 3.1.7 on 2021-03-02 10:36 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("employment", "0012_auto_20181026_1528"), + ] + + operations = [ + migrations.AlterField( + model_name="user", + name="first_name", + field=models.CharField( + blank=True, 
max_length=150, verbose_name="first name" + ), + ), + ] diff --git a/backend/timed/employment/migrations/0014_employment_is_external.py b/backend/timed/employment/migrations/0014_employment_is_external.py new file mode 100644 index 000000000..307f569f6 --- /dev/null +++ b/backend/timed/employment/migrations/0014_employment_is_external.py @@ -0,0 +1,17 @@ +# Generated by Django 3.1.7 on 2021-04-22 11:14 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("employment", "0013_auto_20210302_1136"), + ] + + operations = [ + migrations.AddField( + model_name="employment", + name="is_external", + field=models.BooleanField(default=False), + ), + ] diff --git a/backend/timed/employment/migrations/0015_user_is_accountant.py b/backend/timed/employment/migrations/0015_user_is_accountant.py new file mode 100644 index 000000000..71b551f13 --- /dev/null +++ b/backend/timed/employment/migrations/0015_user_is_accountant.py @@ -0,0 +1,17 @@ +# Generated by Django 3.1.7 on 2021-09-01 15:00 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("employment", "0014_employment_is_external"), + ] + + operations = [ + migrations.AddField( + model_name="user", + name="is_accountant", + field=models.BooleanField(default=False), + ), + ] diff --git a/vendor/.gitkeep b/backend/timed/employment/migrations/__init__.py similarity index 100% rename from vendor/.gitkeep rename to backend/timed/employment/migrations/__init__.py diff --git a/backend/timed/employment/models.py b/backend/timed/employment/models.py new file mode 100644 index 000000000..8e54db546 --- /dev/null +++ b/backend/timed/employment/models.py @@ -0,0 +1,416 @@ +"""Models for the employment app.""" + +from datetime import date, timedelta + +from dateutil import rrule +from django.conf import settings +from django.contrib.auth.models import AbstractUser, UserManager +from django.core.validators import MaxValueValidator, 
class Location(models.Model):
    """Location model.

    A location is the place where an employee works.
    """

    name = models.CharField(max_length=50, unique=True)
    # Workdays defined per location; default is Monday - Friday
    # (isoweekday strings "1" through "5").
    workdays = WeekdaysField(default=["1", "2", "3", "4", "5"])

    class Meta:
        ordering = ("name",)

    def __str__(self):
        """Return the location name as the string representation."""
        return self.name


class PublicHoliday(models.Model):
    """Public holiday model.

    A public holiday is a day on which no employee of a certain location has
    to work.
    """

    name = models.CharField(max_length=50)
    date = models.DateField()
    location = models.ForeignKey(
        Location, on_delete=models.CASCADE, related_name="public_holidays"
    )

    class Meta:
        """Meta information for the public holiday model."""

        indexes = [models.Index(fields=["date"])]
        ordering = ("date",)

    def __str__(self):
        """Return ``"<name> <year>"`` as the string representation."""
        return f"{self.name} {self.date.strftime('%Y')}"
class AbsenceCredit(models.Model):
    """Absence credit model.

    An absence credit is a credit for an absence of a certain type. A user
    should only be able to create as many absences as defined in this credit,
    e.g. a credit granting a user exactly 25 holidays.
    """

    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="absence_credits",
    )
    comment = models.CharField(max_length=255, blank=True)
    absence_type = models.ForeignKey(AbsenceType, on_delete=models.PROTECT)
    date = models.DateField()
    days = models.IntegerField(default=0)
    # Marks whether this absence credit is a transfer from last year.
    transfer = models.BooleanField(default=False)
+ """ + + user = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.CASCADE, + related_name="overtime_credits", + ) + comment = models.CharField(max_length=255, blank=True) + date = models.DateField() + duration = models.DurationField(default=timedelta(0)) + transfer = models.BooleanField(default=False) + """ + Mark whether this absence credit is a transfer from last year. + """ + + +class EmploymentManager(models.Manager): + """Custom manager for employments.""" + + def get_at(self, user, date): + """Get employment of user at given date. + + :param User user: The user of the searched employments + :param datetime.date date: date of employment + :returns: Employment + """ + return self.get( + (models.Q(end_date__gte=date) | models.Q(end_date__isnull=True)), + start_date__lte=date, + user=user, + ) + + def for_user(self, user, start, end): + """Get employments in given time frame for current user. + + This includes overlapping employments. + + :param User user: The user of the searched employments + :param datetime.date start: start of time frame + :param datetime.date end: end of time frame + :returns: queryset of employments + """ + # end date NULL on database is like employment is ending today + queryset = self.annotate( + end=functions.Coalesce("end_date", models.Value(date.today())) + ) + return queryset.filter(user=user).exclude( + models.Q(end__lt=start) | models.Q(start_date__gt=end) + ) + + +class Employment(models.Model): + """Employment model. + + An employment represents a contract which defines where an employee works + and from when to when. 
+ """ + + user = models.ForeignKey( + settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="employments" + ) + location = models.ForeignKey( + Location, on_delete=models.PROTECT, related_name="employments" + ) + percentage = models.IntegerField( + validators=[MinValueValidator(0), MaxValueValidator(100)] + ) + worktime_per_day = models.DurationField() + start_date = models.DateField() + end_date = models.DateField(blank=True, null=True) + objects = EmploymentManager() + + added = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + is_external = models.BooleanField(default=False) + + def __str__(self): + """Represent the model as a string. + + :return: The string representation + :rtype: str + """ + return "{0} ({1} - {2})".format( + self.user.username, + self.start_date.strftime("%d.%m.%Y"), + self.end_date.strftime("%d.%m.%Y") if self.end_date else "today", + ) + + def calculate_worktime(self, start, end): + """Calculate reported, expected and balance for employment. + + 1. It shortens the time frame so it is within given employment + 1. Determine the count of workdays within time frame + 2. Determine the count of public holidays within time frame + 3. The expected worktime consists of following elements: + * Workdays + * Subtracted by holidays + * Multiplied with the worktime per day of the employment + 4. Determine the overtime credit duration within time frame + 5. The reported worktime is the sum of the durations of all reports + for this user within time frame + 6. The absences are all absences for this user within time frame + 7. The balance is the reported time plus the absences plus the + overtime credit minus the expected worktime + + :param start: calculate worktime starting on given day. 
+ :param end: calculate worktime till given day + :returns: tuple of 3 values reported, expected and delta in given + time frame + """ + from timed.tracking.models import Absence, Report + + # shorten time frame to employment + start = max(start, self.start_date) + end = min(self.end_date or date.today(), end) + + # workdays is in isoweekday, byweekday expects Monday to be zero + week_workdays = [int(day) - 1 for day in self.location.workdays] + workdays = rrule.rrule( + rrule.DAILY, dtstart=start, until=end, byweekday=week_workdays + ).count() + + # converting workdays as db expects 1 (Sunday) to 7 (Saturday) + workdays_db = [ + # special case for Sunday + int(day) == 7 and 1 or int(day) + 1 + for day in self.location.workdays + ] + holidays = PublicHoliday.objects.filter( + location=self.location, + date__gte=start, + date__lte=end, + date__week_day__in=workdays_db, + ).count() + + expected_worktime = self.worktime_per_day * (workdays - holidays) + + overtime_credit_data = OvertimeCredit.objects.filter( + user=self.user_id, date__gte=start, date__lte=end + ).aggregate(total_duration=Sum("duration")) + overtime_credit = overtime_credit_data["total_duration"] or timedelta() + + reported_worktime_data = Report.objects.filter( + user=self.user_id, date__gte=start, date__lte=end + ).aggregate(duration_total=Sum("duration")) + reported_worktime = reported_worktime_data["duration_total"] or timedelta() + + absences = sum( + [ + absence.calculate_duration(self) + for absence in Absence.objects.filter( + user=self.user_id, date__gte=start, date__lte=end + ).select_related("absence_type") + ], + timedelta(), + ) + + reported = reported_worktime + absences + overtime_credit + + return (reported, expected_worktime, reported - expected_worktime) + + class Meta: + """Meta information for the employment model.""" + + indexes = [models.Index(fields=["start_date", "end_date"])] + + +class UserManager(UserManager): + def all_supervisors(self): + objects = 
class User(AbstractUser):
    """Timed specific user."""

    supervisors = models.ManyToManyField(
        "self", symmetrical=False, related_name="supervisees"
    )

    # Indicates whether the user has finished the tour through Timed in the
    # frontend.
    tour_done = models.BooleanField(default=False)

    # Overwrite last name to make it required as the interface relies on it.
    # May also be the name of an organization if need be.
    last_name = models.CharField(_("last name"), max_length=30, blank=False)

    is_accountant = models.BooleanField(default=False)

    objects = UserManager()

    @property
    def is_reviewer(self):
        """Whether the user reviews any task, project or customer."""
        assignee_models = (TaskAssignee, ProjectAssignee, CustomerAssignee)
        return any(
            assignee.objects.filter(user=self, is_reviewer=True).exists()
            for assignee in assignee_models
        )

    @property
    def user_id(self):
        """Map to id to be able to use generic permissions."""
        return self.id

    def calculate_worktime(self, start, end):
        """Calculate reported, expected and balance for user.

        Summarizes the worktime over all of the user's employments that fall
        within the given time frame.

        :param start: calculate worktime starting on given day.
        :param end: calculate worktime till given day
        :returns: tuple of 3 values reported, expected and delta in given
                  time frame
        """
        employments = Employment.objects.for_user(self, start, end).select_related(
            "location"
        )

        per_employment = [
            employment.calculate_worktime(start, end) for employment in employments
        ]

        reported = sum((entry[0] for entry in per_employment), timedelta())
        expected = sum((entry[1] for entry in per_employment), timedelta())
        balance = sum((entry[2] for entry in per_employment), timedelta())

        return (reported, expected, balance)

    def get_active_employment(self):
        """Get current employment of the user.

        :returns: the employment active today, or None if there is none.
        """
        try:
            return Employment.objects.get_at(user=self, date=date.today())
        except Employment.DoesNotExist:
            return None
= self.context["request"] + return get_user_model().objects.filter(pk=request.user.pk) diff --git a/backend/timed/employment/serializers.py b/backend/timed/employment/serializers.py new file mode 100644 index 000000000..0a4490a01 --- /dev/null +++ b/backend/timed/employment/serializers.py @@ -0,0 +1,334 @@ +"""Serializers for the employment app.""" + +from datetime import date, timedelta + +from django.contrib.auth import get_user_model +from django.db.models import Max, Value +from django.db.models.functions import Coalesce +from django.utils.duration import duration_string +from django.utils.translation import gettext_lazy as _ +from rest_framework_json_api import relations +from rest_framework_json_api.serializers import ( + ModelSerializer, + Serializer, + SerializerMethodField, + ValidationError, +) + +from timed.employment import models +from timed.tracking.models import Absence, Report + + +class UserSerializer(ModelSerializer): + included_serializers = { + "supervisors": "timed.employment.serializers.UserSerializer", + "supervisees": "timed.employment.serializers.UserSerializer", + } + + class Meta: + """Meta information for the user serializer.""" + + model = get_user_model() + fields = [ + "email", + "first_name", + "is_active", + "is_staff", + "is_superuser", + "last_name", + "supervisees", + "supervisors", + "tour_done", + "username", + "is_reviewer", + "is_accountant", + ] + read_only_fields = [ + "first_name", + "is_active", + "is_staff", + "is_superuser", + "last_name", + "supervisees", + "supervisors", + "username", + "is_reviewer", + "is_accountant", + ] + + +class WorktimeBalanceSerializer(Serializer): + date = SerializerMethodField() + balance = SerializerMethodField() + user = relations.ResourceRelatedField( + model=get_user_model(), read_only=True, source="id" + ) + + def get_date(self, instance): + user = instance.id + today = date.today() + + if instance.date is not None: + return instance.date + + # calculate last reported day if no specific 
date is set + max_absence_date = Absence.objects.filter(user=user, date__lt=today).aggregate( + date=Max("date") + ) + max_report_date = Report.objects.filter(user=user, date__lt=today).aggregate( + date=Max("date") + ) + + last_reported_date = max( + max_absence_date["date"] or date.min, max_report_date["date"] or date.min + ) + + instance.date = last_reported_date + return instance.date + + def get_balance(self, instance): + balance_date = self.get_date(instance) + start = date(balance_date.year, 1, 1) + + # id is mapped to user instance + _, _, balance = instance.id.calculate_worktime(start, balance_date) + return duration_string(balance) + + included_serializers = {"user": "timed.employment.serializers.UserSerializer"} + + class Meta: + resource_name = "worktime-balances" + + +class AbsenceBalanceSerializer(Serializer): + credit = SerializerMethodField() + used_days = SerializerMethodField() + used_duration = SerializerMethodField() + balance = SerializerMethodField() + + user = relations.ResourceRelatedField(model=get_user_model(), read_only=True) + + absence_type = relations.ResourceRelatedField( + model=models.AbsenceType, read_only=True, source="id" + ) + + absence_credits = relations.SerializerMethodResourceRelatedField( + method_name="get_absence_credits", + model=models.AbsenceCredit, + many=True, + read_only=True, + ) + + def _get_start(self, instance): + return date(instance.date.year, 1, 1) + + def get_credit(self, instance): + """ + Calculate how many days are approved for given absence type. + + For absence types which fill worktime this will be None. 
+ """ + if "credit" in instance: + return instance["credit"] + + # id is mapped to absence type + absence_type = instance.id + + start = self._get_start(instance) + + # avoid multiple calculations as get_balance needs it as well + instance["credit"] = absence_type.calculate_credit( + instance.user, start, instance.date + ) + return instance["credit"] + + def get_used_days(self, instance): + """ + Calculate how many days are used of given absence type. + + For absence types which fill worktime this will be None. + """ + if "used_days" in instance: + return instance["used_days"] + + # id is mapped to absence type + absence_type = instance.id + + start = self._get_start(instance) + + # avoid multiple calculations as get_balance needs it as well + instance["used_days"] = absence_type.calculate_used_days( + instance.user, start, instance.date + ) + return instance["used_days"] + + def get_used_duration(self, instance): + """ + Calculate duration of absence type. + + For absence types which fill worktime this will be None. 
+ """ + # id is mapped to absence type + absence_type = instance.id + if not absence_type.fill_worktime: + return None + + start = self._get_start(instance) + absences = sum( + [ + absence.calculate_duration( + models.Employment.objects.get_at(instance.user, absence.date) + ) + for absence in Absence.objects.filter( + user=instance.user, + date__range=[start, instance.date], + absence_type_id=instance.id, + ).select_related("absence_type") + ], + timedelta(), + ) + return duration_string(absences) + + def get_absence_credits(self, instance): + """Get the absence credits for the user and type.""" + if "absence_credits" in instance: + return instance["absence_credits"] + + # id is mapped to absence type + absence_type = instance.id + + start = self._get_start(instance) + absence_credits = models.AbsenceCredit.objects.filter( + absence_type=absence_type, + user=instance.user, + date__range=[start, instance.date], + ).select_related("user") + + # avoid multiple calculations when absence credits need to be included + instance["absence_credits"] = absence_credits + + return absence_credits + + def get_balance(self, instance): + # id is mapped to absence type + absence_type = instance.id + if absence_type.fill_worktime: + return None + + return self.get_credit(instance) - self.get_used_days(instance) + + included_serializers = { + "absence_type": "timed.employment.serializers.AbsenceTypeSerializer", + "absence_credits": "timed.employment.serializers.AbsenceCreditSerializer", + } + + class Meta: + resource_name = "absence-balances" + + +class EmploymentSerializer(ModelSerializer): + included_serializers = { + "user": "timed.employment.serializers.UserSerializer", + "location": "timed.employment.serializers.LocationSerializer", + } + + def validate(self, data): + """Validate the employment as a whole. + + Ensure the end date is after the start date and there is only one + active employment per user and there are no overlapping employments. 
+ + :throws: django.core.exceptions.ValidationError + :return: validated data + :rtype: dict + """ + instance = self.instance + start_date = data.get("start_date", instance and instance.start_date) + end_date = data.get("end_date", instance and instance.end_date) + if end_date and start_date >= end_date: + raise ValidationError(_("The end date must be after the start date")) + + user = data.get("user", instance and instance.user) + employments = models.Employment.objects.filter(user=user) + # end date not set means employment is ending today + end_date = end_date or date.today() + employments = employments.annotate( + end=Coalesce("end_date", Value(date.today())) + ) + if instance: + employments = employments.exclude(id=instance.id) + + if any([e.start_date <= end_date and start_date <= e.end for e in employments]): + raise ValidationError( + _("A user can't have multiple employments at the same time") + ) + + return data + + class Meta: + model = models.Employment + fields = [ + "user", + "location", + "percentage", + "worktime_per_day", + "start_date", + "end_date", + "is_external", + ] + + +class LocationSerializer(ModelSerializer): + """Location serializer.""" + + class Meta: + """Meta information for the location serializer.""" + + model = models.Location + fields = ["name", "workdays"] + + +class PublicHolidaySerializer(ModelSerializer): + """Public holiday serializer.""" + + location = relations.ResourceRelatedField(read_only=True) + + included_serializers = { + "location": "timed.employment.serializers.LocationSerializer" + } + + class Meta: + """Meta information for the public holiday serializer.""" + + model = models.PublicHoliday + fields = ["name", "date", "location"] + + +class AbsenceTypeSerializer(ModelSerializer): + """Absence type serializer.""" + + class Meta: + """Meta information for the absence type serializer.""" + + model = models.AbsenceType + fields = ["name", "fill_worktime"] + + +class AbsenceCreditSerializer(ModelSerializer): + 
"""Absence credit serializer.""" + + included_serializers = { + "absence_type": "timed.employment.serializers.AbsenceTypeSerializer" + } + + class Meta: + """Meta information for the absence credit serializer.""" + + model = models.AbsenceCredit + fields = ["user", "absence_type", "date", "days", "comment", "transfer"] + + +class OvertimeCreditSerializer(ModelSerializer): + class Meta: + model = models.OvertimeCredit + fields = ["user", "date", "duration", "comment", "transfer"] diff --git a/backend/timed/employment/tests/__init__.py b/backend/timed/employment/tests/__init__.py new file mode 100644 index 000000000..6e031999e --- /dev/null +++ b/backend/timed/employment/tests/__init__.py @@ -0,0 +1 @@ +# noqa: D104 diff --git a/backend/timed/employment/tests/test_absence_balance.py b/backend/timed/employment/tests/test_absence_balance.py new file mode 100644 index 000000000..df7760435 --- /dev/null +++ b/backend/timed/employment/tests/test_absence_balance.py @@ -0,0 +1,188 @@ +from datetime import date, timedelta + +from django.urls import reverse +from rest_framework import status + +from timed.employment.factories import ( + AbsenceCreditFactory, + AbsenceTypeFactory, + EmploymentFactory, + UserFactory, +) +from timed.tracking.factories import AbsenceFactory, ReportFactory + + +def test_absence_balance_full_day(auth_client, django_assert_num_queries): + day = date(2017, 2, 28) + + user = auth_client.user + EmploymentFactory.create(user=user, start_date=day) + absence_type = AbsenceTypeFactory.create() + + AbsenceCreditFactory.create(date=day, user=user, days=5, absence_type=absence_type) + + # credit on different user, may not show up + AbsenceCreditFactory.create(date=date.today(), absence_type=absence_type) + + AbsenceFactory.create(date=day, user=user, absence_type=absence_type) + + AbsenceFactory.create( + date=day - timedelta(days=1), user=user, absence_type=absence_type + ) + + url = reverse("absence-balance-list") + + with django_assert_num_queries(6): + 
result = auth_client.get( + url, + data={ + "date": "2017-03-01", + "user": user.id, + "include": "absence_credits,absence_type", + }, + ) + + assert result.status_code == status.HTTP_200_OK + json = result.json() + assert len(json["data"]) == 1 + entry = json["data"][0] + + assert entry["id"] == "{0}_{1}_2017-03-01".format(user.id, absence_type.id) + assert entry["attributes"]["credit"] == 5 + assert entry["attributes"]["used-days"] == 2 + assert entry["attributes"]["used-duration"] is None + assert entry["attributes"]["balance"] == 3 + + assert len(json["included"]) == 2 + + +def test_absence_balance_fill_worktime(auth_client, django_assert_num_queries): + day = date(2017, 2, 28) + + user = UserFactory.create() + user.supervisors.add(auth_client.user) + EmploymentFactory.create( + user=user, start_date=day, worktime_per_day=timedelta(hours=5) + ) + absence_type = AbsenceTypeFactory.create(fill_worktime=True) + + ReportFactory.create( + user=user, date=day + timedelta(days=1), duration=timedelta(hours=4) + ) + + AbsenceFactory.create( + date=day + timedelta(days=1), user=user, absence_type=absence_type + ) + + AbsenceFactory.create(date=day, user=user, absence_type=absence_type) + + url = reverse("absence-balance-list") + with django_assert_num_queries(11): + result = auth_client.get( + url, + data={ + "date": "2017-03-01", + "user": user.id, + "include": "absence_credits,absence_type", + }, + ) + assert result.status_code == status.HTTP_200_OK + + json = result.json() + assert len(json["data"]) == 1 + entry = json["data"][0] + + assert entry["id"] == "{0}_{1}_2017-03-01".format(user.id, absence_type.id) + + assert entry["attributes"]["credit"] is None + assert entry["attributes"]["balance"] is None + assert entry["attributes"]["used-days"] is None + assert entry["attributes"]["used-duration"] == "06:00:00" + + +def test_absence_balance_detail(auth_client): + user = auth_client.user + absence_type = AbsenceTypeFactory.create() + url = reverse( + 
"absence-balance-detail", + args=["{0}_{1}_2017-03-01".format(user.id, absence_type.id)], + ) + + result = auth_client.get(url) + assert result.status_code == status.HTTP_200_OK + + json = result.json() + entry = json["data"] + + assert entry["attributes"]["credit"] == 0 + assert entry["attributes"]["balance"] == 0 + assert entry["attributes"]["used-days"] == 0 + assert entry["attributes"]["used-duration"] is None + + +def test_absence_balance_list_none_supervisee(auth_client): + url = reverse("absence-balance-list") + AbsenceTypeFactory.create() + unrelated_user = UserFactory.create() + + result = auth_client.get( + url, data={"user": unrelated_user.id, "date": "2017-01-03"} + ) + assert result.status_code == status.HTTP_200_OK + assert len(result.json()["data"]) == 0 + + +def test_absence_balance_detail_none_supervisee(auth_client): + url = reverse("absence-balance-list") + absence_type = AbsenceTypeFactory.create() + unrelated_user = UserFactory.create() + + url = reverse( + "absence-balance-detail", + args=["{0}_{1}_2017-03-01".format(unrelated_user.id, absence_type.id)], + ) + + result = auth_client.get(url) + assert result.status_code == status.HTTP_404_NOT_FOUND + + +def test_absence_balance_invalid_date_in_pk(auth_client): + url = reverse("absence-balance-detail", args=["1_2_invalid"]) + + result = auth_client.get(url) + assert result.status_code == status.HTTP_404_NOT_FOUND + + +def test_absence_balance_invalid_user_in_pk(auth_client): + url = reverse("absence-balance-detail", args=["999999_2_2017-03-01"]) + + result = auth_client.get(url) + assert result.status_code == status.HTTP_404_NOT_FOUND + + +def test_absence_balance_no_date(auth_client): + url = reverse("absence-balance-list") + + result = auth_client.get(url, data={"user": auth_client.user.id}) + assert result.status_code == status.HTTP_400_BAD_REQUEST + + +def test_absence_balance_invalid_date(auth_client): + url = reverse("absence-balance-list") + + result = auth_client.get(url, data={"user": 
auth_client.user.id, "date": "invalid"}) + assert result.status_code == status.HTTP_400_BAD_REQUEST + + +def test_absence_balance_no_user(auth_client): + url = reverse("absence-balance-list") + + result = auth_client.get(url, data={"date": "2017-03-01"}) + assert result.status_code == status.HTTP_400_BAD_REQUEST + + +def test_absence_balance_invalid_user(auth_client): + url = reverse("absence-balance-list") + + result = auth_client.get(url, data={"date": "2017-03-01", "user": "invalid"}) + assert result.status_code == status.HTTP_400_BAD_REQUEST diff --git a/backend/timed/employment/tests/test_absence_credit.py b/backend/timed/employment/tests/test_absence_credit.py new file mode 100644 index 000000000..b494f7c2e --- /dev/null +++ b/backend/timed/employment/tests/test_absence_credit.py @@ -0,0 +1,76 @@ +from django.urls import reverse +from rest_framework import status + +from timed.employment.factories import ( + AbsenceCreditFactory, + AbsenceTypeFactory, + UserFactory, +) + + +def test_absence_credit_create_authenticated(auth_client): + url = reverse("absence-credit-list") + + result = auth_client.post(url) + assert result.status_code == status.HTTP_403_FORBIDDEN + + +def test_absence_credit_create_superuser(superadmin_client): + absence_type = AbsenceTypeFactory.create() + + url = reverse("absence-credit-list") + + data = { + "data": { + "type": "absence-credits", + "id": None, + "attributes": {"date": "2017-01-01", "duration": "01:00:00"}, + "relationships": { + "user": {"data": {"type": "users", "id": superadmin_client.user.id}}, + "absence_type": { + "data": {"type": "absence-types", "id": absence_type.id} + }, + }, + } + } + + result = superadmin_client.post(url, data) + assert result.status_code == status.HTTP_201_CREATED + + +def test_absence_credit_get_authenticated(auth_client): + AbsenceCreditFactory.create_batch(2) + absence_credit = AbsenceCreditFactory.create(user=auth_client.user) + url = reverse("absence-credit-list") + + result = 
auth_client.get(url) + assert result.status_code == status.HTTP_200_OK + json = result.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(absence_credit.id) + + +def test_absence_credit_get_superuser(superadmin_client): + AbsenceCreditFactory.create_batch(2) + AbsenceCreditFactory.create(user=superadmin_client.user) + url = reverse("absence-credit-list") + + result = superadmin_client.get(url) + assert result.status_code == status.HTTP_200_OK + json = result.json() + assert len(json["data"]) == 3 + + +def test_absence_credit_get_supervisor(auth_client): + user = UserFactory.create() + auth_client.user.supervisees.add(user) + + AbsenceCreditFactory.create_batch(1) + AbsenceCreditFactory.create(user=auth_client.user) + AbsenceCreditFactory.create(user=user) + url = reverse("absence-credit-list") + + result = auth_client.get(url) + assert result.status_code == status.HTTP_200_OK + json = result.json() + assert len(json["data"]) == 2 diff --git a/backend/timed/employment/tests/test_absence_type.py b/backend/timed/employment/tests/test_absence_type.py new file mode 100644 index 000000000..ad6811227 --- /dev/null +++ b/backend/timed/employment/tests/test_absence_type.py @@ -0,0 +1,94 @@ +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.conftest import setup_customer_and_employment_status +from timed.employment.factories import AbsenceTypeFactory, EmploymentFactory + + +@pytest.mark.parametrize( + "is_employed, is_customer_assignee, is_customer, expected", + [ + (False, True, True, 0), + (False, True, False, 0), + (True, False, False, 2), + (True, True, False, 2), + (True, True, True, 2), + ], +) +def test_absence_type_list( + auth_client, is_employed, is_customer_assignee, is_customer, expected +): + setup_customer_and_employment_status( + user=auth_client.user, + is_assignee=is_customer_assignee, + is_customer=is_customer, + is_employed=is_employed, + is_external=False, + ) + 
AbsenceTypeFactory.create_batch(2) + url = reverse("absence-type-list") + + response = auth_client.get(url) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == expected + + +def test_absence_type_list_filter_fill_worktime(internal_employee_client): + absence_type = AbsenceTypeFactory.create(fill_worktime=True) + AbsenceTypeFactory.create() + + url = reverse("absence-type-list") + + response = internal_employee_client.get(url, data={"fill_worktime": 1}) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(absence_type.id) + + +@pytest.mark.parametrize( + "is_employed, expected", + [ + (True, status.HTTP_200_OK), + (False, status.HTTP_404_NOT_FOUND), + ], +) +def test_absence_type_detail(auth_client, is_employed, expected): + absence_type = AbsenceTypeFactory.create() + if is_employed: + EmploymentFactory.create(user=auth_client.user) + + url = reverse("absence-type-detail", args=[absence_type.id]) + + response = auth_client.get(url) + + assert response.status_code == expected + + +def test_absence_type_create(auth_client): + url = reverse("absence-type-list") + + response = auth_client.post(url) + assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED + + +def test_absence_type_update(auth_client): + absence_type = AbsenceTypeFactory.create() + + url = reverse("absence-type-detail", args=[absence_type.id]) + + response = auth_client.patch(url) + assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED + + +def test_absence_type_delete(auth_client): + absence_type = AbsenceTypeFactory.create() + + url = reverse("absence-type-detail", args=[absence_type.id]) + + response = auth_client.delete(url) + assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED diff --git a/backend/timed/employment/tests/test_employment.py b/backend/timed/employment/tests/test_employment.py new file mode 100644 
index 000000000..97ea8f73d --- /dev/null +++ b/backend/timed/employment/tests/test_employment.py @@ -0,0 +1,296 @@ +"""Tests for the employments endpoint.""" + +from datetime import date, timedelta + +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.employment import factories +from timed.employment.admin import EmploymentForm +from timed.employment.factories import EmploymentFactory, LocationFactory, UserFactory +from timed.employment.models import Employment +from timed.tracking.factories import ReportFactory + + +def test_employment_create_authenticated(auth_client): + url = reverse("employment-list") + + result = auth_client.post(url) + assert result.status_code == status.HTTP_403_FORBIDDEN + + +def test_employment_create_superuser(superadmin_client): + url = reverse("employment-list") + location = LocationFactory.create() + + data = { + "data": { + "type": "employments", + "id": None, + "attributes": { + "percentage": "100", + "worktime_per_day": "08:00:00", + "start-date": "2017-04-01", + }, + "relationships": { + "user": {"data": {"type": "users", "id": superadmin_client.user.id}}, + "location": {"data": {"type": "locations", "id": location.id}}, + }, + } + } + + result = superadmin_client.post(url, data) + assert result.status_code == status.HTTP_201_CREATED + + +def test_employment_update_end_before_start(superadmin_client): + employment = EmploymentFactory.create(user=superadmin_client.user) + + data = { + "data": { + "type": "employments", + "id": employment.id, + "attributes": {"start_date": "2017-03-01", "end_date": "2017-01-01"}, + } + } + + url = reverse("employment-detail", args=[employment.id]) + result = superadmin_client.patch(url, data) + assert result.status_code == status.HTTP_400_BAD_REQUEST + + +def test_employment_update_overlapping(superadmin_client): + user = superadmin_client.user + EmploymentFactory.create(user=user, end_date=None) + employment = EmploymentFactory.create(user=user) + + data 
= { + "data": { + "type": "employments", + "id": employment.id, + "attributes": {"end_date": None}, + } + } + + url = reverse("employment-detail", args=[employment.id]) + result = superadmin_client.patch(url, data) + assert result.status_code == status.HTTP_400_BAD_REQUEST + + +def test_employment_list_authenticated(auth_client): + EmploymentFactory.create_batch(2) + employment = EmploymentFactory.create(user=auth_client.user) + + url = reverse("employment-list") + + result = auth_client.get(url) + assert result.status_code == status.HTTP_200_OK + json = result.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(employment.id) + + +def test_employment_list_superuser(superadmin_client): + EmploymentFactory.create_batch(2) + EmploymentFactory.create(user=superadmin_client.user) + + url = reverse("employment-list") + + result = superadmin_client.get(url) + assert result.status_code == status.HTTP_200_OK + json = result.json() + assert len(json["data"]) == 3 + + +def test_employment_list_filter_date(auth_client): + EmploymentFactory.create( + user=auth_client.user, start_date=date(2017, 1, 1), end_date=date(2017, 4, 1) + ) + employment = EmploymentFactory.create( + user=auth_client.user, start_date=date(2017, 4, 2), end_date=None + ) + + url = reverse("employment-list") + + result = auth_client.get(url, data={"date": "2017-04-05"}) + assert result.status_code == status.HTTP_200_OK + json = result.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(employment.id) + + +def test_employment_list_supervisor(auth_client): + user = UserFactory.create() + auth_client.user.supervisees.add(user) + + EmploymentFactory.create_batch(1) + EmploymentFactory.create(user=auth_client.user) + EmploymentFactory.create(user=user) + + url = reverse("employment-list") + + result = auth_client.get(url) + assert result.status_code == status.HTTP_200_OK + json = result.json() + assert len(json["data"]) == 2 + + +def 
test_employment_unique_active(db): + """Should only be able to have one active employment per user.""" + user = UserFactory.create() + EmploymentFactory.create(user=user, end_date=None) + employment = EmploymentFactory.create(user=user) + form = EmploymentForm({"end_date": None}, instance=employment) + + with pytest.raises(ValueError): + form.save() + + +def test_employment_start_before_end(db): + employment = EmploymentFactory.create() + form = EmploymentForm( + {"start_date": date(2009, 1, 1), "end_date": date(2016, 1, 1)}, + instance=employment, + ) + + with pytest.raises(ValueError): + form.save() + + +def test_employment_get_at(db): + """Should return the right employment on a date.""" + user = UserFactory.create() + employment = EmploymentFactory.create(user=user) + + assert Employment.objects.get_at(user, employment.start_date) == employment + + employment.end_date = employment.start_date + timedelta(days=20) + + employment.save() + + with pytest.raises(Employment.DoesNotExist): + Employment.objects.get_at(user, employment.start_date + timedelta(days=21)) + + +def test_worktime_balance_partial(db): + """ + Test partial calculation of worktime balance. + + Partial is defined as a worktime balance of a time frame + which is shorter than employment. 
+ """ + employment = factories.EmploymentFactory.create( + start_date=date(2010, 1, 1), end_date=None, worktime_per_day=timedelta(hours=8) + ) + user = employment.user + + # Calculate over one week + start = date(2017, 3, 19) + end = date(2017, 3, 26) + + # Overtime credit of 10.5 hours + factories.OvertimeCreditFactory.create( + user=user, date=start, duration=timedelta(hours=10, minutes=30) + ) + + # One public holiday during workdays + factories.PublicHolidayFactory.create(date=start, location=employment.location) + # One public holiday on weekend + factories.PublicHolidayFactory.create( + date=start + timedelta(days=1), location=employment.location + ) + # 5 workdays minus one holiday (32 hours) + expected_expected = timedelta(hours=32) + + # reported 2 days each 10 hours + for day in range(3, 5): + ReportFactory.create( + user=user, date=start + timedelta(days=day), duration=timedelta(hours=10) + ) + # 10 hours reported time + 10.5 overtime credit + expected_reported = timedelta(hours=30, minutes=30) + expected_balance = expected_reported - expected_expected + + reported, expected, balance = employment.calculate_worktime(start, end) + + assert expected == expected_expected + assert reported == expected_reported + assert balance == expected_balance + + +def test_worktime_balance_longer(db): + """Test calculation of worktime when frame is longer than employment.""" + employment = factories.EmploymentFactory.create( + start_date=date(2017, 3, 21), + end_date=date(2017, 3, 27), + worktime_per_day=timedelta(hours=8), + ) + user = employment.user + + # Calculate over one year + start = date(2017, 1, 1) + end = date(2017, 12, 31) + + # Overtime credit of 10.5 hours before employment + factories.OvertimeCreditFactory.create( + user=user, date=start, duration=timedelta(hours=10, minutes=30) + ) + # Overtime credit of during employment + factories.OvertimeCreditFactory.create( + user=user, date=employment.start_date, duration=timedelta(hours=10, minutes=30) + ) + + # 
One public holiday during employment + factories.PublicHolidayFactory.create( + date=employment.start_date, location=employment.location + ) + # One public holiday before employment started + factories.PublicHolidayFactory.create( + date=date(2017, 3, 20), location=employment.location + ) + # 5 workdays minus one holiday (32 hours) + expected_expected = timedelta(hours=32) + + # reported 2 days each 10 hours + for day in range(3, 5): + ReportFactory.create( + user=user, + date=employment.start_date + timedelta(days=day), + duration=timedelta(hours=10), + ) + # reported time not on current employment + ReportFactory.create(user=user, date=date(2017, 1, 5), duration=timedelta(hours=10)) + # 10 hours reported time + 10.5 overtime credit + expected_reported = timedelta(hours=30, minutes=30) + expected_balance = expected_reported - expected_expected + + reported, expected, balance = employment.calculate_worktime(start, end) + + assert expected == expected_expected + assert reported == expected_reported + assert balance == expected_balance + + +def test_employment_for_user(db): + user = factories.UserFactory.create() + # employment overlapping time frame (early start) + factories.EmploymentFactory.create( + start_date=date(2017, 1, 1), end_date=date(2017, 2, 28), user=user + ) + # employment overlapping time frame (early end) + factories.EmploymentFactory.create( + start_date=date(2017, 3, 1), end_date=date(2017, 3, 31), user=user + ) + # employment within time frame + factories.EmploymentFactory.create( + start_date=date(2017, 4, 1), end_date=date(2017, 4, 30), user=user + ) + # employment without end date + factories.EmploymentFactory.create( + start_date=date(2017, 5, 1), end_date=None, user=user + ) + + employments = Employment.objects.for_user(user, date(2017, 2, 1), date(2017, 12, 1)) + + assert employments.count() == 4 diff --git a/backend/timed/employment/tests/test_location.py b/backend/timed/employment/tests/test_location.py new file mode 100644 index 
000000000..5685afda6 --- /dev/null +++ b/backend/timed/employment/tests/test_location.py @@ -0,0 +1,80 @@ +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.conftest import setup_customer_and_employment_status +from timed.employment.factories import EmploymentFactory, LocationFactory + + +@pytest.mark.parametrize( + "is_employed, is_customer_assignee, is_customer, expected", + [ + (False, True, True, 0), + (False, True, False, 0), + (True, True, True, 2), + (True, True, False, 2), + (True, False, False, 2), + ], +) +def test_location_list( + auth_client, is_employed, is_customer_assignee, is_customer, expected, location +): + setup_customer_and_employment_status( + user=auth_client.user, + is_assignee=is_customer_assignee, + is_customer=is_customer, + is_employed=is_employed, + is_external=False, + ) + url = reverse("location-list") + + response = auth_client.get(url) + assert response.status_code == status.HTTP_200_OK + + data = response.json()["data"] + assert len(data) == expected + if expected: + assert data[0]["attributes"]["workdays"] == ([str(day) for day in range(1, 6)]) + + +@pytest.mark.parametrize( + "is_employed, expected", + [ + (True, status.HTTP_200_OK), + (False, status.HTTP_404_NOT_FOUND), + ], +) +def test_location_detail(auth_client, is_employed, expected): + location = LocationFactory.create() + if is_employed: + EmploymentFactory.create(user=auth_client.user) + + url = reverse("location-detail", args=[location.id]) + + response = auth_client.get(url) + assert response.status_code == expected + + +def test_location_create(auth_client): + url = reverse("location-list") + + response = auth_client.post(url) + assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED + + +def test_location_update(auth_client): + location = LocationFactory.create() + + url = reverse("location-detail", args=[location.id]) + + response = auth_client.patch(url) + assert response.status_code == 
status.HTTP_405_METHOD_NOT_ALLOWED + + +def test_location_delete(auth_client): + location = LocationFactory.create() + + url = reverse("location-detail", args=[location.id]) + + response = auth_client.delete(url) + assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED diff --git a/backend/timed/employment/tests/test_overtime_credit.py b/backend/timed/employment/tests/test_overtime_credit.py new file mode 100644 index 000000000..f5ec531b3 --- /dev/null +++ b/backend/timed/employment/tests/test_overtime_credit.py @@ -0,0 +1,69 @@ +"""Tests for the overtime credits endpoint.""" + +from django.urls import reverse +from rest_framework import status + +from timed.employment.factories import OvertimeCreditFactory, UserFactory + + +def test_overtime_credit_create_authenticated(auth_client): + url = reverse("overtime-credit-list") + + result = auth_client.post(url) + assert result.status_code == status.HTTP_403_FORBIDDEN + + +def test_overtime_credit_create_superuser(superadmin_client): + url = reverse("overtime-credit-list") + + data = { + "data": { + "type": "overtime-credits", + "id": None, + "attributes": {"date": "2017-01-01", "duration": "01:00:00"}, + "relationships": { + "user": {"data": {"type": "users", "id": superadmin_client.user.id}} + }, + } + } + + result = superadmin_client.post(url, data) + assert result.status_code == status.HTTP_201_CREATED + + +def test_overtime_credit_get_authenticated(auth_client): + OvertimeCreditFactory.create_batch(2) + overtime_credit = OvertimeCreditFactory.create(user=auth_client.user) + url = reverse("overtime-credit-list") + + result = auth_client.get(url) + assert result.status_code == status.HTTP_200_OK + json = result.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(overtime_credit.id) + + +def test_overtime_credit_get_superuser(superadmin_client): + OvertimeCreditFactory.create_batch(2) + OvertimeCreditFactory.create(user=superadmin_client.user) + url = reverse("overtime-credit-list") 
+ + result = superadmin_client.get(url) + assert result.status_code == status.HTTP_200_OK + json = result.json() + assert len(json["data"]) == 3 + + +def test_overtime_credit_get_supervisor(auth_client): + user = UserFactory.create() + auth_client.user.supervisees.add(user) + + OvertimeCreditFactory.create_batch(1) + OvertimeCreditFactory.create(user=auth_client.user) + OvertimeCreditFactory.create(user=user) + url = reverse("overtime-credit-list") + + result = auth_client.get(url) + assert result.status_code == status.HTTP_200_OK + json = result.json() + assert len(json["data"]) == 2 diff --git a/backend/timed/employment/tests/test_public_holiday.py b/backend/timed/employment/tests/test_public_holiday.py new file mode 100644 index 000000000..033252876 --- /dev/null +++ b/backend/timed/employment/tests/test_public_holiday.py @@ -0,0 +1,95 @@ +from datetime import date + +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.conftest import setup_customer_and_employment_status +from timed.employment.factories import EmploymentFactory, PublicHolidayFactory + + +@pytest.mark.parametrize( + "is_employed, is_customer_assignee, is_customer, expected", + [ + (False, True, True, 0), + (False, True, False, 0), + (True, False, False, 1), + (True, True, False, 1), + (True, True, True, 1), + ], +) +def test_public_holiday_list( + auth_client, is_employed, is_customer_assignee, is_customer, expected +): + setup_customer_and_employment_status( + user=auth_client.user, + is_assignee=is_customer_assignee, + is_customer=is_customer, + is_employed=is_employed, + is_external=False, + ) + PublicHolidayFactory.create() + url = reverse("public-holiday-list") + + response = auth_client.get(url) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == expected + + +@pytest.mark.parametrize( + "is_employed, expected", + [ + (True, status.HTTP_200_OK), + (False, status.HTTP_404_NOT_FOUND), + ], +) 
+def test_public_holiday_detail(auth_client, is_employed, expected): + public_holiday = PublicHolidayFactory.create() + if is_employed: + EmploymentFactory.create(user=auth_client.user) + + url = reverse("public-holiday-detail", args=[public_holiday.id]) + + response = auth_client.get(url) + assert response.status_code == expected + + +def test_public_holiday_create(auth_client): + url = reverse("public-holiday-list") + + response = auth_client.post(url) + assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED + + +def test_public_holiday_update(auth_client): + public_holiday = PublicHolidayFactory.create() + + url = reverse("public-holiday-detail", args=[public_holiday.id]) + + response = auth_client.patch(url) + assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED + + +def test_public_holiday_delete(auth_client): + public_holiday = PublicHolidayFactory.create() + + url = reverse("public-holiday-detail", args=[public_holiday.id]) + + response = auth_client.delete(url) + assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED + + +def test_public_holiday_year_filter(internal_employee_client): + PublicHolidayFactory.create(date=date(2017, 1, 1)) + public_holiday = PublicHolidayFactory.create(date=date(2018, 1, 1)) + + url = reverse("public-holiday-list") + + response = internal_employee_client.get(url, data={"year": 2018}) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(public_holiday.id) diff --git a/backend/timed/employment/tests/test_user.py b/backend/timed/employment/tests/test_user.py new file mode 100644 index 000000000..136e62a21 --- /dev/null +++ b/backend/timed/employment/tests/test_user.py @@ -0,0 +1,306 @@ +from datetime import date, timedelta + +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.employment.factories import ( + AbsenceTypeFactory, + EmploymentFactory, + 
UserFactory, +) +from timed.projects.factories import ( + CustomerAssigneeFactory, + ProjectAssigneeFactory, + ProjectFactory, +) +from timed.tracking.factories import AbsenceFactory, ReportFactory + + +def test_user_list_unauthenticated(client): + url = reverse("user-list") + response = client.get(url) + assert response.status_code == status.HTTP_401_UNAUTHORIZED + + +def test_user_update_unauthenticated(client, db): + user = UserFactory.create() + url = reverse("user-detail", args=[user.id]) + response = client.patch(url) + assert response.status_code == status.HTTP_401_UNAUTHORIZED + + +def test_user_list(db, internal_employee_client, django_assert_num_queries): + UserFactory.create_batch(2) + + url = reverse("user-list") + + with django_assert_num_queries(14): + response = internal_employee_client.get(url) + + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 3 + + +def test_user_list_external_employee(external_employee_client): + UserFactory.create_batch(2) + + url = reverse("user-list") + + response = external_employee_client.get(url) + + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 1 + + +def test_user_detail(internal_employee_client): + user = internal_employee_client.user + + url = reverse("user-detail", args=[user.id]) + + response = internal_employee_client.get(url) + assert response.status_code == status.HTTP_200_OK + + +def test_user_create_authenticated(internal_employee_client): + url = reverse("user-list") + + response = internal_employee_client.post(url) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def test_user_create_superuser(superadmin_client): + url = reverse("user-list") + + data = { + "data": { + "type": "users", + "id": None, + "attributes": { + "is_staff": True, + "tour_done": True, + "email": "test@example.net", + "first_name": "First name", + "last_name": "Last name", + }, + } + } + + response = 
superadmin_client.post(url, data) + assert response.status_code == status.HTTP_201_CREATED + + +def test_user_update_owner(internal_employee_client): + user = internal_employee_client.user + data = { + "data": { + "type": "users", + "id": user.id, + "attributes": {"is_staff": True, "tour_done": True}, + } + } + + url = reverse("user-detail", args=[user.id]) + + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_200_OK + + user.refresh_from_db() + assert user.tour_done + assert not user.is_staff + + +def test_user_update_other(internal_employee_client): + """User may not change other user.""" + user = UserFactory.create() + url = reverse("user-detail", args=[user.id]) + res = internal_employee_client.patch(url) + + assert res.status_code == status.HTTP_403_FORBIDDEN + + +def test_user_delete_authenticated(internal_employee_client): + """Should not be able delete a user.""" + user = internal_employee_client.user + + url = reverse("user-detail", args=[user.id]) + + response = internal_employee_client.delete(url) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def test_user_delete_superuser(superadmin_client): + """Should not be able delete a user.""" + user = UserFactory.create() + EmploymentFactory.create(user=superadmin_client.user) + + url = reverse("user-detail", args=[user.id]) + + response = superadmin_client.delete(url) + assert response.status_code == status.HTTP_204_NO_CONTENT + + +def test_user_delete_with_reports_superuser(superadmin_client, db): + """Test that user with reports may not be deleted.""" + user = UserFactory.create() + ReportFactory.create(user=user) + EmploymentFactory.create(user=superadmin_client.user) + + url = reverse("user-detail", args=[user.id]) + + response = superadmin_client.delete(url) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def test_user_supervisor_filter(internal_employee_client): + """Should filter users by supervisor.""" + supervisees = 
UserFactory.create_batch(5) + + UserFactory.create_batch(5) + + internal_employee_client.user.supervisees.add(*supervisees) + internal_employee_client.user.save() + + res = internal_employee_client.get( + reverse("user-list"), {"supervisor": internal_employee_client.user.id} + ) + + assert len(res.json()["data"]) == 5 + + +@pytest.mark.freeze_time("2018-01-07") +def test_user_transfer(superadmin_client): + user = UserFactory.create() + EmploymentFactory.create(user=superadmin_client.user) + EmploymentFactory.create(user=user, start_date=date(2017, 12, 28), percentage=100) + AbsenceTypeFactory.create(fill_worktime=True) + AbsenceTypeFactory.create(fill_worktime=False) + absence_type = AbsenceTypeFactory.create(fill_worktime=False) + AbsenceFactory.create(user=user, absence_type=absence_type, date=date(2017, 12, 29)) + + url = reverse("user-transfer", args=[user.id]) + response = superadmin_client.post(url) + assert response.status_code == status.HTTP_204_NO_CONTENT + + # running transfer twice should lead to same result + response = superadmin_client.post(url) + assert response.status_code == status.HTTP_204_NO_CONTENT + + assert user.overtime_credits.count() == 1 + overtime_credit = user.overtime_credits.first() + assert overtime_credit.transfer + assert overtime_credit.date == date(2018, 1, 1) + assert overtime_credit.duration == timedelta(hours=-8, minutes=-30) + assert overtime_credit.comment == "Transfer 2017" + + assert user.absence_credits.count() == 1 + absence_credit = user.absence_credits.first() + assert absence_credit.transfer + assert absence_credit.date == date(2018, 1, 1) + assert absence_credit.days == -1 + assert absence_credit.comment == "Transfer 2017" + + +@pytest.mark.parametrize("value,expected", [(1, 2), (0, 2)]) +def test_user_is_external_filter(internal_employee_client, value, expected): + """Should filter users if they have an internal employment.""" + user = UserFactory.create() + user2, user3 = UserFactory.create_batch(2) + 
EmploymentFactory.create(is_external=False, user=user) + EmploymentFactory.create(is_external=True, user=user2) + EmploymentFactory.create(is_external=True, user=user3) + + response = internal_employee_client.get( + reverse("user-list"), {"is_external": value} + ) + assert len(response.json()["data"]) == expected + + +@pytest.mark.parametrize("value,expected", [(1, 1), (0, 4)]) +def test_user_is_reviewer_filter(internal_employee_client, value, expected): + """Should filter users if they are a reviewer.""" + user = UserFactory.create() + project = ProjectFactory.create() + UserFactory.create_batch(3) + ProjectAssigneeFactory.create(user=user, project=project, is_reviewer=True) + + res = internal_employee_client.get(reverse("user-list"), {"is_reviewer": value}) + assert len(res.json()["data"]) == expected + + +@pytest.mark.parametrize("value,expected", [(1, 1), (0, 5)]) +def test_user_is_supervisor_filter(internal_employee_client, value, expected): + """Should filter useres if they are a supervisor.""" + users = UserFactory.create_batch(2) + UserFactory.create_batch(3) + + internal_employee_client.user.supervisees.add(*users) + + res = internal_employee_client.get(reverse("user-list"), {"is_supervisor": value}) + assert len(res.json()["data"]) == expected + + +def test_user_attributes(internal_employee_client, project): + """Should filter users if they are a reviewer.""" + user = UserFactory.create() + + url = reverse("user-detail", args=[user.id]) + + res = internal_employee_client.get(url) + assert not res.json()["data"]["attributes"]["is-reviewer"] + + ProjectAssigneeFactory.create(user=user, project=project, is_reviewer=True) + res = internal_employee_client.get(url) + assert res.json()["data"]["attributes"]["is-reviewer"] + + +def test_user_me_auth(internal_employee_client): + """Should return the internal_employee_client user.""" + user = internal_employee_client.user + + url = reverse("user-me") + + response = internal_employee_client.get(url) + assert 
response.status_code == status.HTTP_200_OK + + me_data = response.json()["data"] + assert me_data["id"] == str(user.id) + + # should be the same as user-detail + url = reverse("user-detail", args=[user.id]) + + response = internal_employee_client.get(url) + assert me_data == response.json()["data"] + + +def test_user_me_anonymous(client): + """Non-authenticated client doesn't do anything.""" + url = reverse("user-me") + + response = client.get(url) + assert response.status_code == status.HTTP_401_UNAUTHORIZED + + +@pytest.mark.parametrize( + "is_customer, expected, status_code", + [(True, 1, status.HTTP_200_OK), (False, 0, status.HTTP_403_FORBIDDEN)], +) +def test_user_list_no_employment(auth_client, is_customer, expected, status_code): + user = auth_client.user + UserFactory.create_batch(2) + if is_customer: + CustomerAssigneeFactory.create(user=user, is_customer=True) + + url = reverse("user-list") + + response = auth_client.get(url) + assert response.status_code == status_code + + if expected: + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(user.id) diff --git a/backend/timed/employment/tests/test_worktime_balance.py b/backend/timed/employment/tests/test_worktime_balance.py new file mode 100644 index 000000000..4db21dde3 --- /dev/null +++ b/backend/timed/employment/tests/test_worktime_balance.py @@ -0,0 +1,225 @@ +from datetime import date, timedelta + +import pytest +from django.urls import reverse +from django.utils.duration import duration_string +from rest_framework import status + +from timed.employment.factories import ( + EmploymentFactory, + OvertimeCreditFactory, + PublicHolidayFactory, + UserFactory, +) +from timed.tracking.factories import AbsenceFactory, ReportFactory + + +def test_worktime_balance_create(auth_client): + url = reverse("worktime-balance-list") + + result = auth_client.post(url) + assert result.status_code == status.HTTP_405_METHOD_NOT_ALLOWED + + +def 
test_worktime_balance_no_employment(auth_client, django_assert_num_queries): + url = reverse("worktime-balance-list") + + with django_assert_num_queries(3): + result = auth_client.get( + url, data={"user": auth_client.user.id, "date": "2017-01-01"} + ) + + assert result.status_code == status.HTTP_200_OK + + json = result.json() + assert len(json["data"]) == 1 + data = json["data"][0] + assert data["id"] == "{0}_2017-01-01".format(auth_client.user.id) + assert data["attributes"]["balance"] == "00:00:00" + + +def test_worktime_balance_with_employments(auth_client, django_assert_num_queries): + # Calculate over one week + start_date = date(2017, 3, 19) + end_date = date(2017, 3, 26) + + employment = EmploymentFactory.create( + user=auth_client.user, + start_date=start_date, + worktime_per_day=timedelta(hours=8, minutes=30), + end_date=date(2017, 3, 23), + ) + EmploymentFactory.create( + user=auth_client.user, + start_date=date(2017, 3, 24), + worktime_per_day=timedelta(hours=8), + end_date=None, + ) + + # Overtime credit of 10 hours + OvertimeCreditFactory.create( + user=auth_client.user, date=start_date, duration=timedelta(hours=10, minutes=30) + ) + + # One public holiday during workdays + PublicHolidayFactory.create(date=start_date, location=employment.location) + # One public holiday on weekend + PublicHolidayFactory.create( + date=start_date + timedelta(days=1), location=employment.location + ) + + # 2x 10 hour reported worktime + ReportFactory.create( + user=auth_client.user, + date=start_date + timedelta(days=3), + duration=timedelta(hours=10), + ) + + ReportFactory.create( + user=auth_client.user, + date=start_date + timedelta(days=4), + duration=timedelta(hours=10), + ) + + # one absence + AbsenceFactory.create(user=auth_client.user, date=start_date + timedelta(days=5)) + + url = reverse( + "worktime-balance-detail", + args=["{0}_{1}".format(auth_client.user.id, end_date.strftime("%Y-%m-%d"))], + ) + + with django_assert_num_queries(11): + result = 
auth_client.get(url) + assert result.status_code == status.HTTP_200_OK + + # 4 workdays 8.5 hours, 1 workday 8 hours, minus one holiday 8.5 + # minutes 10.5 hours overtime credit + expected_worktime = timedelta(hours=23) + + # 2 x 10 reports hours + 1 absence of 8 hours + expected_reported = timedelta(hours=28) + + json = result.json() + assert json["data"]["attributes"]["balance"] == ( + duration_string(expected_reported - expected_worktime) + ) + + +def test_worktime_balance_invalid_pk(auth_client): + url = reverse("worktime-balance-detail", args=["invalid"]) + + result = auth_client.get(url) + assert result.status_code == status.HTTP_404_NOT_FOUND + + +def test_worktime_balance_no_date(auth_client): + url = reverse("worktime-balance-list") + + result = auth_client.get(url) + assert result.status_code == status.HTTP_400_BAD_REQUEST + + +def test_worktime_balance_invalid_date(auth_client): + url = reverse("worktime-balance-list") + + result = auth_client.get(url, data={"date": "invalid"}) + assert result.status_code == status.HTTP_400_BAD_REQUEST + + +def test_user_worktime_list_superuser(auth_client): + auth_client.user.is_superuser = True + auth_client.user.save() + supervisee = UserFactory.create() + UserFactory.create() + auth_client.user.supervisees.add(supervisee) + + url = reverse("worktime-balance-list") + + result = auth_client.get(url, data={"date": "2017-01-01"}) + + assert result.status_code == status.HTTP_200_OK + + json = result.json() + assert len(json["data"]) == 3 + + +def test_worktime_balance_list_supervisor(auth_client): + supervisee = UserFactory.create() + UserFactory.create() + auth_client.user.supervisees.add(supervisee) + + url = reverse("worktime-balance-list") + + result = auth_client.get(url, data={"date": "2017-01-01"}) + + assert result.status_code == status.HTTP_200_OK + + json = result.json() + assert len(json["data"]) == 2 + + +def test_worktime_balance_list_filter_user(auth_client): + supervisee = UserFactory.create() + 
UserFactory.create() + auth_client.user.supervisees.add(supervisee) + + url = reverse("worktime-balance-list") + + result = auth_client.get(url, data={"date": "2017-01-01", "user": supervisee.id}) + + assert result.status_code == status.HTTP_200_OK + + json = result.json() + assert len(json["data"]) == 1 + + +def test_worktime_balance_list_last_reported_date_no_reports( + auth_client, django_assert_num_queries +): + url = reverse("worktime-balance-list") + + with django_assert_num_queries(1): + result = auth_client.get(url, data={"last_reported_date": 1}) + + assert result.status_code == status.HTTP_200_OK + + json = result.json() + assert len(json["data"]) == 0 + + +@pytest.mark.freeze_time("2017-02-02") +def test_worktime_balance_list_last_reported_date( + auth_client, django_assert_num_queries +): + EmploymentFactory.create( + user=auth_client.user, + start_date=date(2017, 2, 1), + end_date=date(2017, 2, 2), + worktime_per_day=timedelta(hours=8), + ) + + ReportFactory.create( + user=auth_client.user, date=date(2017, 2, 1), duration=timedelta(hours=10) + ) + + # reports today and in the future should be ignored + ReportFactory.create( + user=auth_client.user, date=date(2017, 2, 2), duration=timedelta(hours=10) + ) + ReportFactory.create( + user=auth_client.user, date=date(2017, 2, 3), duration=timedelta(hours=10) + ) + + url = reverse("worktime-balance-list") + + with django_assert_num_queries(9): + result = auth_client.get(url, data={"last_reported_date": 1}) + + assert result.status_code == status.HTTP_200_OK + + json = result.json() + assert len(json["data"]) == 1 + entry = json["data"][0] + assert entry["attributes"]["date"] == "2017-02-01" + assert entry["attributes"]["balance"] == "02:00:00" diff --git a/backend/timed/employment/urls.py b/backend/timed/employment/urls.py new file mode 100644 index 000000000..5c8a59f52 --- /dev/null +++ b/backend/timed/employment/urls.py @@ -0,0 +1,20 @@ +"""URL to view mapping for the employment app.""" + +from django.conf 
import settings +from rest_framework.routers import SimpleRouter + +from timed.employment import views + +r = SimpleRouter(trailing_slash=settings.APPEND_SLASH) + +r.register(r"users", views.UserViewSet, "user") +r.register(r"employments", views.EmploymentViewSet, "employment") +r.register(r"locations", views.LocationViewSet, "location") +r.register(r"public-holidays", views.PublicHolidayViewSet, "public-holiday") +r.register(r"absence-types", views.AbsenceTypeViewSet, "absence-type") +r.register(r"overtime-credits", views.OvertimeCreditViewSet, "overtime-credit") +r.register(r"absence-credits", views.AbsenceCreditViewSet, "absence-credit") +r.register(r"worktime-balances", views.WorktimeBalanceViewSet, "worktime-balance") +r.register(r"absence-balances", views.AbsenceBalanceViewSet, "absence-balance") + +urlpatterns = r.urls diff --git a/backend/timed/employment/views.py b/backend/timed/employment/views.py new file mode 100644 index 000000000..e48ceacac --- /dev/null +++ b/backend/timed/employment/views.py @@ -0,0 +1,461 @@ +"""Viewsets for the employment app.""" +import datetime + +from django.contrib.auth import get_user_model +from django.db.models import CharField, DateField, IntegerField, Q, Value +from django.db.models.functions import Concat +from django.shortcuts import get_object_or_404 +from django.utils.translation import gettext_lazy as _ +from rest_framework import exceptions, status +from rest_framework.decorators import action +from rest_framework.response import Response +from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet + +from timed.employment import filters, models, serializers +from timed.employment.permissions import NoReports +from timed.mixins import AggregateQuerysetMixin +from timed.permissions import ( + IsAuthenticated, + IsCreateOnly, + IsDeleteOnly, + IsOwner, + IsReadOnly, + IsSuperUser, + IsSupervisor, + IsUpdateOnly, +) +from timed.projects.models import CustomerAssignee, Task +from timed.tracking.models import 
Absence, Report + + +class UserViewSet(ModelViewSet): + """ + Expose user actions. + + Users are managed in admin therefore this end point + only allows retrieving and updating. + """ + + permission_classes = [ + # only owner, superuser and supervisor may update user + (IsOwner | IsSuperUser | IsSupervisor) & IsUpdateOnly + # only superuser may delete users without reports + | IsSuperUser & IsDeleteOnly & NoReports + # only superuser may create users + | IsSuperUser & IsCreateOnly + # all authenticated users may read + | IsAuthenticated & IsReadOnly + ] + + serializer_class = serializers.UserSerializer + filterset_class = filters.UserFilterSet + search_fields = ("username", "first_name", "last_name") + + def get_queryset(self): + user = self.request.user + queryset = get_user_model().objects.prefetch_related( + "employments", "supervisees", "supervisors" + ) + + try: + current_employment = models.Employment.objects.get_at( + user=user, date=datetime.date.today() + ) + if current_employment.is_external: + assigned_tasks = Task.objects.filter( + Q(task_assignees__user=user, task_assignees__is_reviewer=True) + | Q( + project__project_assignees__user=user, + project__project_assignees__is_reviewer=True, + ) + | Q( + project__customer__customer_assignees__user=user, + project__customer__customer_assignees__is_reviewer=True, + ) + ) + visible_reports = Report.objects.all().filter( + Q(task__in=assigned_tasks) | Q(user=user) + ) + + return queryset.filter(Q(reports__in=visible_reports) | Q(id=user.id)) + return queryset + except models.Employment.DoesNotExist: + if CustomerAssignee.objects.filter(user=user, is_customer=True).exists(): + assigned_tasks = Task.objects.filter( + Q( + project__customer__customer_assignees__user=user, + project__customer__customer_assignees__is_customer=True, + ) + ) + visible_reports = Report.objects.all().filter( + Q(task__in=assigned_tasks) | Q(user=user) + ) + return queryset.filter(Q(reports__in=visible_reports) | Q(id=user.id)) + raise 
exceptions.PermissionDenied("User has no employment") + + @action(methods=["get"], detail=False) + def me(self, request, pk=None): + User = get_user_model() + self.object = get_object_or_404(User, pk=request.user.id) + serializer = self.get_serializer(self.object) + + return Response(serializer.data) + + @action(methods=["post"], detail=True) + def transfer(self, request, pk=None): + """ + Transfer worktime and absence balance to new year. + + It will skip any credits if a credit already exists on the first + of the new year. + """ + user = self.get_object() + + year = datetime.date.today().year + start_year = datetime.date(year, 1, 1) + start = datetime.date(year - 1, 1, 1) + end = datetime.date(year - 1, 12, 31) + + # transfer absence types + transfered_absence_credits = user.absence_credits.filter( + date=start_year, transfer=True + ) + types = models.AbsenceType.objects.filter(fill_worktime=False).exclude( + id__in=transfered_absence_credits.values("absence_type") + ) + for absence_type in types: + credit = absence_type.calculate_credit(user, start, end) + used_days = absence_type.calculate_used_days(user, start, end) + balance = credit - used_days + if balance != 0: + models.AbsenceCredit.objects.create( + absence_type=absence_type, + user=user, + comment=_("Transfer %(year)s") % {"year": year - 1}, + date=start_year, + days=balance, + transfer=True, + ) + + # transfer overtime + overtime_credit = user.overtime_credits.filter(date=start_year, transfer=True) + if not overtime_credit.exists(): + reported, expected, delta = user.calculate_worktime(start, end) + models.OvertimeCredit.objects.create( + user=user, + comment=_("Transfer %(year)s") % {"year": year - 1}, + date=start_year, + duration=delta, + transfer=True, + ) + + return Response(status=status.HTTP_204_NO_CONTENT) + + +class WorktimeBalanceViewSet(AggregateQuerysetMixin, ReadOnlyModelViewSet): + """Calculate worktime for different user on different dates.""" + + serializer_class = 
serializers.WorktimeBalanceSerializer + filterset_class = filters.WorktimeBalanceFilterSet + + def _extract_date(self): + """ + Extract date from request. + + In detail route extract it from pk and it list + from query params. + """ + pk = self.request.parser_context["kwargs"].get("pk") + + # detail case + if pk is not None: + try: + return datetime.datetime.strptime(pk.split("_")[1], "%Y-%m-%d") + + except (ValueError, TypeError, IndexError): + raise exceptions.NotFound() + + # list case + query_params = self.request.query_params + try: + return datetime.datetime.strptime( + query_params.get("date"), "%Y-%m-%d" + ).date() + except ValueError: + raise exceptions.ParseError(_("Date is invalid")) + except TypeError: + if query_params.get("last_reported_date", "0") == "0": + raise exceptions.ParseError(_("Date filter needs to be set")) + + return None + + def get_queryset(self): + date = self._extract_date() + user = self.request.user + queryset = get_user_model().objects.values("id") + queryset = queryset.annotate(date=Value(date, DateField())) + # last_reported_date filter is set, a date can only be calucated + # for users with either at least one absence or report + if date is None: + users_with_reports = Report.objects.values("user").distinct() + users_with_absences = Absence.objects.values("user").distinct() + active_users = users_with_reports.union(users_with_absences) + queryset = queryset.filter(id__in=active_users) + + queryset = queryset.annotate( + pk=Concat("id", Value("_"), "date", output_field=CharField()) + ) + + if not user.is_superuser: + queryset = queryset.filter(Q(id=user.id) | Q(supervisors=user)) + + return queryset + + +class AbsenceBalanceViewSet(AggregateQuerysetMixin, ReadOnlyModelViewSet): + """Calculate absence balance for different user on different dates.""" + + serializer_class = serializers.AbsenceBalanceSerializer + filterset_class = filters.AbsenceBalanceFilterSet + + def _extract_date(self): + """ + Extract date from request. 
+ + In detail route extract it from pk and it list + from query params. + """ + pk = self.request.parser_context["kwargs"].get("pk") + + # detail case + if pk is not None: + try: + return datetime.datetime.strptime(pk.split("_")[2], "%Y-%m-%d") + + except (ValueError, TypeError, IndexError): + raise exceptions.NotFound() + + # list case + try: + return datetime.datetime.strptime( + self.request.query_params.get("date"), "%Y-%m-%d" + ).date() + except ValueError: + raise exceptions.ParseError(_("Date is invalid")) + except TypeError: + raise exceptions.ParseError(_("Date filter needs to be set")) + + def _extract_user(self): + """ + Extract user from request. + + In detail route extract it from pk and it list + from query params. + """ + pk = self.request.parser_context["kwargs"].get("pk") + + # detail case + if pk is not None: + try: + user_id = int(pk.split("_")[0]) + # avoid query if user is self + if self.request.user.id == user_id: + return self.request.user + return get_user_model().objects.get(pk=pk.split("_")[0]) + except (ValueError, get_user_model().DoesNotExist): + raise exceptions.NotFound() + + # list case + try: + user_id = self.request.query_params.get("user") + if user_id is None: + raise exceptions.ParseError(_("User filter needs to be set")) + + # avoid query if user is self + if self.request.user.id == int(user_id): + return self.request.user + + return get_user_model().objects.get(pk=user_id) + except (ValueError, get_user_model().DoesNotExist): + raise exceptions.ParseError(_("User is invalid")) + + def get_queryset(self): + date = self._extract_date() + user = self._extract_user() + + queryset = models.AbsenceType.objects.values("id") + queryset = queryset.annotate(date=Value(date, DateField())) + queryset = queryset.annotate(user=Value(user.id, IntegerField())) + queryset = queryset.annotate( + pk=Concat( + "user", + Value("_"), + "id", + Value("_"), + "date", + output_field=CharField(), + ) + ) + + # only myself, superuser and supervisors may 
see by absence balances + current_user = self.request.user + + if not current_user.is_superuser: + if current_user.id != user.id: + if not current_user.supervisees.filter(id=user.id).exists(): + return models.AbsenceType.objects.none() + + return queryset + + +class EmploymentViewSet(ModelViewSet): + serializer_class = serializers.EmploymentSerializer + ordering = ("-end_date",) + filterset_class = filters.EmploymentFilterSet + permission_classes = [ + # super user can add/read overtime credits + IsSuperUser + # user may only read filtered results + | IsAuthenticated & IsReadOnly + ] + + def get_queryset(self): + """ + Get queryset of employments. + + Following rules apply: + 1. super user may see all + 2. user may see credits of all its supervisors and self + 3. user may only see its own credit + """ + user = self.request.user + + queryset = models.Employment.objects.select_related("user", "location") + + if not user.is_superuser: + queryset = queryset.filter(Q(user=user) | Q(user__supervisors=user)) + + return queryset + + +class LocationViewSet(ReadOnlyModelViewSet): + """Location viewset set.""" + + queryset = models.Location.objects.all() + serializer_class = serializers.LocationSerializer + ordering = ("name",) + + def get_queryset(self): + """Don't show locations to customers.""" + user = self.request.user + + queryset = models.Location.objects.all() + + if user.get_active_employment(): + return queryset + return queryset.none() + + +class PublicHolidayViewSet(ReadOnlyModelViewSet): + """Public holiday view set.""" + + serializer_class = serializers.PublicHolidaySerializer + filterset_class = filters.PublicHolidayFilterSet + ordering = ("date",) + + def get_queryset(self): + """Prefetch the related data. + + Don't show public holidays to customers. 
+ + :return: The public holidays + :rtype: QuerySet + """ + user = self.request.user + + queryset = models.PublicHoliday.objects.select_related("location") + + if user.get_active_employment(): + return queryset + return queryset.none() + + +class AbsenceTypeViewSet(ReadOnlyModelViewSet): + """Absence type view set.""" + + queryset = models.AbsenceType.objects.all() + serializer_class = serializers.AbsenceTypeSerializer + filterset_class = filters.AbsenceTypeFilterSet + ordering = ("name",) + + def get_queryset(self): + """Don't show absence types to customers.""" + user = self.request.user + + queryset = models.AbsenceType.objects.all() + + if user.get_active_employment(): + return queryset + return queryset.none() + + +class AbsenceCreditViewSet(ModelViewSet): + """Absence type view set.""" + + filterset_class = filters.AbsenceCreditFilterSet + serializer_class = serializers.AbsenceCreditSerializer + permission_classes = [ + # super user can add/read absence credits + IsSuperUser + # user may only read filtered results + | IsAuthenticated & IsReadOnly + ] + + def get_queryset(self): + """ + Get queryset of absence credits. + + Following rules apply: + 1. super user may see all + 2. user may see credits of all its supervisors and self + 3. user may only see its own credit + """ + user = self.request.user + + queryset = models.AbsenceCredit.objects.select_related("user") + + if not user.is_superuser: + queryset = queryset.filter(Q(user=user) | Q(user__supervisors=user)) + + return queryset + + +class OvertimeCreditViewSet(ModelViewSet): + """Absence type view set.""" + + filterset_class = filters.OvertimeCreditFilterSet + serializer_class = serializers.OvertimeCreditSerializer + permission_classes = [ + # super user can add/read overtime credits + IsSuperUser + # user may only read filtered results + | IsAuthenticated & IsReadOnly + ] + + def get_queryset(self): + """ + Get queryset of overtime credits. + + Following rules apply: + 1. super user may see all + 2. 
user may see credits of all its supervisors and self + 3. user may only see its own credit + """ + user = self.request.user + + queryset = models.OvertimeCredit.objects.select_related("user") + + if not user.is_superuser: + queryset = queryset.filter(Q(user=user) | Q(user__supervisors=user)) + + return queryset diff --git a/backend/timed/fixtures/test_data.json b/backend/timed/fixtures/test_data.json new file mode 100644 index 000000000..2aa474fda --- /dev/null +++ b/backend/timed/fixtures/test_data.json @@ -0,0 +1,387 @@ +[ + { + "model": "employment.location", + "pk": 1, + "fields": { "name": "Location 1", "workdays": "1,2,3,4,5" } + }, + { + "model": "employment.absencetype", + "pk": 1, + "fields": { "name": "EO", "fill_worktime": false } + }, + { + "model": "employment.absencetype", + "pk": 2, + "fields": { "name": "Ferien", "fill_worktime": false } + }, + { + "model": "employment.absencetype", + "pk": 3, + "fields": { "name": "Krankheit", "fill_worktime": true } + }, + { + "model": "projects.customer", + "pk": 1, + "fields": { + "name": "Customer", + "reference": null, + "email": "customer@customer.customer", + "website": "", + "comment": "", + "archived": false + } + }, + { + "model": "projects.customer", + "pk": 2, + "fields": { + "name": "Bob", + "reference": null, + "email": "", + "website": "", + "comment": "", + "archived": false + } + }, + { + "model": "projects.costcenter", + "pk": 1, + "fields": { "name": "Cash", "reference": null } + }, + { + "model": "projects.billingtype", + "pk": 1, + "fields": { "name": "Cash", "reference": null } + }, + { + "model": "projects.task", + "pk": 1, + "fields": { + "name": "Work", + "reference": null, + "estimated_time": "2 02:00:00", + "archived": false, + "project": 1, + "cost_center": 1 + } + }, + { + "model": "projects.task", + "pk": 2, + "fields": { + "name": "Special Work", + "reference": null, + "estimated_time": "4 04:00:00", + "archived": false, + "project": 1, + "cost_center": null + } + }, + { + "model": 
"projects.task", + "pk": 3, + "fields": { + "name": "No Work", + "reference": null, + "estimated_time": null, + "archived": false, + "project": 1, + "cost_center": null + } + }, + { + "model": "projects.task", + "pk": 4, + "fields": { + "name": "Some Work", + "reference": null, + "estimated_time": "05:00:00", + "archived": false, + "project": 2, + "cost_center": 1 + } + }, + { + "model": "projects.task", + "pk": 5, + "fields": { + "name": "Build", + "reference": null, + "estimated_time": null, + "archived": false, + "project": 3, + "cost_center": null + } + }, + { + "model": "employment.user", + "pk": 1, + "fields": { + "password": "pbkdf2_sha256$150000$lgPMtNRsmi6E$l+HWaKslNGUOpRniI2AqeMxJpn8nDBCIfK4cMO/WcRo=", + "last_login": "2020-03-12T09:24:33.471Z", + "is_superuser": true, + "username": "admin", + "first_name": "Admin", + "email": "admin@example.com", + "is_staff": true, + "is_active": true, + "date_joined": "2020-03-12T09:24:27.469Z", + "tour_done": false, + "last_name": "Strator", + "groups": [], + "user_permissions": [], + "supervisors": [] + } + }, + { + "model": "employment.user", + "pk": 2, + "fields": { + "password": "pbkdf2_sha256$150000$pK2Jl6xl4iYO$NvhTs+T85I5Z9RTzTm/QNXbk20iM384gst9Nj0nWWrI=", + "last_login": null, + "is_superuser": false, + "username": "axels", + "first_name": "Axel", + "email": "axel@example.com", + "is_staff": false, + "is_active": true, + "date_joined": "2020-03-12T09:27:21Z", + "tour_done": true, + "last_name": "Schöni", + "groups": [], + "user_permissions": [], + "supervisors": [3] + } + }, + { + "model": "employment.user", + "pk": 3, + "fields": { + "password": "pbkdf2_sha256$150000$R6spIXkVyNm7$Qg2vsL0klTpgTqRwXm9bu0efHtYM8aAVYsgcXqVJsF0=", + "last_login": null, + "is_superuser": false, + "username": "fritzm", + "first_name": "Fritz", + "email": "fritz@example.com", + "is_staff": false, + "is_active": true, + "date_joined": "2020-03-12T09:28:55Z", + "tour_done": true, + "last_name": "Muster", + "groups": [], + 
"user_permissions": [], + "supervisors": [] + } + }, + { + "model": "employment.user", + "pk": 4, + "fields": { + "password": "", + "last_login": null, + "is_superuser": false, + "username": "jasminem", + "first_name": "Jasmine", + "email": "jasmine@example.com", + "is_accountant": true, + "is_staff": false, + "is_active": true, + "date_joined": "2020-03-12T09:28:55Z", + "tour_done": true, + "last_name": "Meier", + "groups": [], + "user_permissions": [], + "supervisors": [] + } + }, + { + "model": "employment.user", + "pk": 5, + "fields": { + "password": "pbkdf2_sha256$150000$R6spIXkVyNm7$Qg2vsL0klTpgTqRwXm9bu0efHtYM8aAVYsgcXqVJsF0=", + "last_login": null, + "is_superuser": false, + "username": "wladimirc", + "first_name": "Wladimir", + "email": "wladimir@example.com", + "is_staff": false, + "is_active": true, + "date_joined": "2020-03-12T09:28:55Z", + "tour_done": true, + "last_name": "Customer", + "groups": [], + "user_permissions": [], + "supervisors": [] + } + }, + { + "model": "projects.project", + "pk": 1, + "fields": { + "name": "Big Project", + "reference": null, + "comment": "A very big project", + "archived": false, + "estimated_time": "20 20:00:00", + "customer": 1, + "billing_type": 1, + "cost_center": 1, + "customer_visible": false + } + }, + { + "model": "projects.project", + "pk": 2, + "fields": { + "name": "Small Project", + "reference": null, + "comment": "A small project", + "archived": false, + "estimated_time": "1 06:00:00", + "customer": 1, + "billing_type": 1, + "cost_center": 1, + "customer_visible": false + } + }, + { + "model": "projects.project", + "pk": 3, + "fields": { + "name": "Building", + "reference": null, + "comment": "", + "archived": false, + "estimated_time": "4 04:00:00", + "customer": 2, + "billing_type": null, + "cost_center": null, + "customer_visible": false + } + }, + { + "model": "employment.employment", + "pk": 1, + "fields": { + "user": 2, + "location": 1, + "percentage": 100, + "worktime_per_day": "08:00:00", + 
"start_date": "2020-01-01", + "end_date": null, + "added": "2020-03-12T09:27:21.761Z", + "updated": "2020-03-12T09:27:21.761Z" + } + }, + { + "model": "employment.employment", + "pk": 2, + "fields": { + "user": 3, + "location": 1, + "percentage": 80, + "worktime_per_day": "06:00:00", + "start_date": "2020-01-01", + "end_date": null, + "added": "2020-03-12T09:28:55.640Z", + "updated": "2020-03-12T09:28:55.640Z" + } + }, + { + "model": "employment.employment", + "pk": 3, + "fields": { + "user": 1, + "location": 1, + "percentage": 100, + "worktime_per_day": "08:00:00", + "start_date": "2020-01-01", + "end_date": null, + "added": "2020-03-19T09:27:21.761Z", + "updated": "2020-03-19T09:27:21.761Z" + } + }, + { + "model": "employment.employment", + "pk": 4, + "fields": { + "user": 4, + "location": 1, + "percentage": 100, + "worktime_per_day": "08:00:00", + "start_date": "2020-01-01", + "end_date": null, + "added": "2020-03-19T09:27:21.761Z", + "updated": "2020-03-19T09:27:21.761Z" + } + }, + { + "model": "employment.overtimecredit", + "pk": 1, + "fields": { + "user": 2, + "comment": "Overtime", + "date": "2020-01-01", + "duration": "20:00:00", + "transfer": false + } + }, + { + "model": "employment.absencecredit", + "pk": 1, + "fields": { + "user": 2, + "comment": "Absence", + "absence_type": 1, + "date": "2020-01-01", + "days": 20, + "transfer": false + } + }, + { + "model": "employment.absencecredit", + "pk": 2, + "fields": { + "user": 2, + "comment": "Sickness", + "absence_type": 2, + "date": "2020-01-01", + "days": 5, + "transfer": false + } + }, + { + "model": "projects.customerassignee", + "pk": 1, + "fields": { + "user": 2, + "customer": 1, + "is_resource": false, + "is_reviewer": true, + "is_manager": false + } + }, + { + "model": "projects.customerassignee", + "pk": 2, + "fields": { + "user": 5, + "customer": 1, + "is_resource": false, + "is_reviewer": true, + "is_manager": false, + "is_customer": true + } + }, + { + "model": "projects.taskassignee", + "pk": 1, 
+ "fields": { + "user": 3, + "task": 5, + "is_resource": false, + "is_reviewer": true, + "is_manager": true + } + } +] diff --git a/backend/timed/forms.py b/backend/timed/forms.py new file mode 100644 index 000000000..915fdf1ef --- /dev/null +++ b/backend/timed/forms.py @@ -0,0 +1,33 @@ +from datetime import timedelta + +from django.core.exceptions import ValidationError +from django.forms.fields import FloatField +from django.utils.translation import gettext_lazy as _ + + +class DurationInHoursField(FloatField): + """Field representing duration as float hours.""" + + def _get_hours(self, value): + return value.total_seconds() / 3600 + + def prepare_value(self, value): + if isinstance(value, timedelta): + return self._get_hours(value) + return value + + def to_python(self, value): + value = super().to_python(value) + if value is None: + return value + + return timedelta(seconds=value * 3600) + + def validate(self, value): + if value in self.empty_values: + return + + if not isinstance(value, timedelta): + raise ValidationError(_("Enter a datetime.timedelta")) + + super().validate(self._get_hours(value)) diff --git a/backend/timed/locale/en/LC_MESSAGES/django.po b/backend/timed/locale/en/LC_MESSAGES/django.po new file mode 100644 index 000000000..5bee27d66 --- /dev/null +++ b/backend/timed/locale/en/LC_MESSAGES/django.po @@ -0,0 +1,250 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. 
+# +msgid "" +msgstr "" +"Project-Id-Version: \n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-04-08 14:55+0200\n" +"PO-Revision-Date: 2017-03-02 13:59+0100\n" +"Last-Translator: \n" +"Language-Team: \n" +"Language: en\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"X-Generator: Poedit 1.8.11\n" + +#: timed/employment/admin.py:22 +msgid "supervised by" +msgstr "" + +#: timed/employment/admin.py:34 +msgid "supervising" +msgstr "" + +#: timed/employment/admin.py:47 +msgid "Supervisor" +msgstr "" + +#: timed/employment/admin.py:48 +msgid "Supervisors" +msgstr "" + +#: timed/employment/admin.py:56 +msgid "Employee" +msgstr "" + +#: timed/employment/admin.py:57 +msgid "Employees" +msgstr "" + +#: timed/employment/admin.py:63 +msgid "Worktime per day in hours" +msgstr "" + +#: timed/employment/admin.py:83 timed/employment/serializers.py:248 +msgid "The end date must be after the start date" +msgstr "The end date must be after the start date" + +#: timed/employment/admin.py:93 timed/employment/serializers.py:262 +msgid "A user can't have multiple employments at the same time" +msgstr "A user can't have multiple employments at the same time" + +#: timed/employment/admin.py:113 timed/subscription/admin.py:14 +msgid "Duration in hours" +msgstr "" + +#: timed/employment/admin.py:149 +msgid "Extra fields" +msgstr "" + +#: timed/employment/admin.py:154 +msgid "Disable selected users" +msgstr "" + +#: timed/employment/admin.py:159 +msgid "Enable selected users" +msgstr "" + +#: timed/employment/admin.py:164 +msgid "Disable staff status of selected users" +msgstr "" + +#: timed/employment/admin.py:169 +msgid "Enable staff status of selected users" +msgstr "" + +#: timed/employment/models.py:347 +msgid "last name" +msgstr "" + +#: timed/employment/views.py:96 timed/employment/views.py:108 +#, python-format +msgid "Transfer %(year)s" +msgstr "" + +#: timed/employment/views.py:147 timed/employment/views.py:206 
+msgid "Date is invalid" +msgstr "" + +#: timed/employment/views.py:150 timed/employment/views.py:208 +msgid "Date filter needs to be set" +msgstr "" + +#: timed/employment/views.py:234 +msgid "User filter needs to be set" +msgstr "" + +#: timed/employment/views.py:242 +msgid "User is invalid" +msgstr "" + +#: timed/forms.py:31 +msgid "Enter a datetime.timedelta" +msgstr "" + +#: timed/models.py:17 +msgid "Monday" +msgstr "" + +#: timed/models.py:18 +msgid "Tuesday" +msgstr "" + +#: timed/models.py:19 +msgid "Wednesday" +msgstr "" + +#: timed/models.py:20 +msgid "Thursday" +msgstr "" + +#: timed/models.py:21 +msgid "Friday" +msgstr "" + +#: timed/models.py:22 +msgid "Saturday" +msgstr "" + +#: timed/models.py:23 +msgid "Sunday" +msgstr "" + +#: timed/projects/admin.py:47 timed/projects/admin.py:94 +msgid "Estimated time in hours" +msgstr "" + +#: timed/projects/admin.py:87 +msgid "Reviewer" +msgstr "" + +#: timed/projects/admin.py:88 +msgid "Reviewers" +msgstr "" + +#: timed/subscription/models.py:54 +msgid "password" +msgstr "" + +#: timed/templates/login.html:9 +msgid "Please correct the error below." +msgstr "" + +#: timed/templates/login.html:9 +msgid "Please correct the errors below." +msgstr "" + +#: timed/templates/login.html:25 +#, python-format +msgid "" +"You are authenticated as %(username)s, but are not authorized to access this " +"page. Would you like to login to a different account?" +msgstr "" + +#: timed/templates/login.html:46 +msgid "Forgotten your password or username?" 
+msgstr "" + +#: timed/templates/login.html:50 +msgid "Log in" +msgstr "" + +#: timed/templates/login.html:57 +msgid "Current user:" +msgstr "" + +#: timed/templates/login.html:63 +msgid "Login with SSO" +msgstr "" + +#: timed/tracking/serializers.py:47 +#, fuzzy +#| msgid "A user can only have one active employment" +msgid "A user can only have one active activity" +msgstr "A user can only have one active employment" + +#: timed/tracking/serializers.py:60 +msgid "An activity block may not end before it starts." +msgstr "" + +#: timed/tracking/serializers.py:108 +#, python-brace-format +msgid "Only owner may change {field}" +msgstr "" + +#: timed/tracking/serializers.py:140 +msgid "Only reviewer may verify reports." +msgstr "" + +#: timed/tracking/serializers.py:143 +msgid "You may only verifiy with your own user" +msgstr "" + +#: timed/tracking/serializers.py:147 +msgid "Report can't both be set as `review` and `verified`." +msgstr "" + +#: timed/tracking/serializers.py:151 +msgid "Only reviewers may bill reports." +msgstr "" + +#: timed/tracking/serializers.py:307 +msgid "Only owner may change date" +msgstr "" + +#: timed/tracking/serializers.py:317 +msgid "Only owner may change absence type" +msgstr "" + +#: timed/tracking/serializers.py:335 +msgid "You can't create an absence on an unemployed day." +msgstr "" + +#: timed/tracking/serializers.py:341 +msgid "You can't create an absence on a public holiday" +msgstr "" + +#: timed/tracking/serializers.py:345 +msgid "You can't create an absence on a weekend" +msgstr "" + +#: timed/tracking/views.py:177 +msgid "Editable filter needs to be set for bulk update" +msgstr "" + +#: timed/tracking/views.py:186 timed/tracking/views.py:206 +msgid "Reviewer filter needs to be set to verifying user" +msgstr "" + +#: timed/tracking/views.py:197 +msgid "Reports can't both be set as `review` and `verified`." 
+msgstr "" + +#: timed/tracking/views.py:257 +#, python-brace-format +msgid "Your request exceeds the maximum allowed entries ({0} > {1})" +msgstr "" diff --git a/backend/timed/mixins.py b/backend/timed/mixins.py new file mode 100644 index 000000000..1d32d83c0 --- /dev/null +++ b/backend/timed/mixins.py @@ -0,0 +1,90 @@ +from rest_framework_json_api import relations + +from timed.serializers import AggregateObject + + +class AggregateQuerysetMixin(object): + """ + Add support for aggregate queryset in view. + + Wrap queryst dicts into aggregate object to support renderer + which expect attributes. + It additionally prefetches related instances represented as id in + aggregate. + + In aggregates only an id of a related field is part of the object. + Instead of loading each single object row by row this mixin prefetches + all resource related fields and injects it before serialization starts. + + Mixin expects the id to be the same key as the resource related + field defined in the serializer. + + To reduce number of queries `prefetch_related_for_field` can be defined + to prefetch related data per field like the following: + >>> from rest_framework.viewsets import ReadOnlyModelViewSet + ... class MyView(ReadOnlyModelViewSet, AggregateQuerysetMixin): + ... # ... + ... prefetch_related_for_field = { + ... 'field_name': ['field_name_prefetch'] + ... } + ... # ... + """ + + def _is_related_field(self, val): + """ + Check whether value is a related field. + + Ignores serializer method fields which define logic separately. 
+ """ + return isinstance(val, relations.ResourceRelatedField) and not isinstance( + val, relations.ManySerializerMethodResourceRelatedField + ) + + def get_serializer(self, data=None, *args, **kwargs): + # no data no wrapping needed + if not data: + return super().get_serializer(data, *args, **kwargs) + + many = kwargs.get("many") + if not many: + data = [data] + + # prefetch data for all related fields + prefetch_per_field = {} + serializer_class = self.get_serializer_class() + for key, value in serializer_class._declared_fields.items(): + if self._is_related_field(value): + source = value.source or key + if many: + obj_ids = data.values_list(source, flat=True) + else: + obj_ids = [data[0][source]] + + qs = value.model.objects.filter(id__in=obj_ids) + qs = qs.select_related() + if hasattr(self, "prefetch_related_for_field"): # pragma: no cover + qs = qs.prefetch_related( + *self.prefetch_related_for_field.get(source, []) + ) + + objects = {obj.id: obj for obj in qs} + prefetch_per_field[source] = objects + + # enhance entry dicts with model instances + data = [ + AggregateObject( + **{ + **entry, + **{ + field: objects[entry.get(field) or entry.get(f"{field}_id")] + for field, objects in prefetch_per_field.items() + }, + } + ) + for entry in data + ] + + if not many: + data = data[0] + + return super().get_serializer(data, *args, **kwargs) diff --git a/backend/timed/models.py b/backend/timed/models.py new file mode 100644 index 000000000..887de5e44 --- /dev/null +++ b/backend/timed/models.py @@ -0,0 +1,29 @@ +"""Basic model and field classes to be used in all apps.""" +from django.utils.translation import gettext_lazy as _ +from multiselectfield import MultiSelectField + + +class WeekdaysField(MultiSelectField): + """ + Multi select field using weekdays as choices. + + Stores weekdays as comma-separated values in database as + iso week day (MON = 1, SUN = 7). 
+ """ + + MO, TU, WE, TH, FR, SA, SU = range(1, 8) + + WEEKDAYS = ( + (MO, _("Monday")), + (TU, _("Tuesday")), + (WE, _("Wednesday")), + (TH, _("Thursday")), + (FR, _("Friday")), + (SA, _("Saturday")), + (SU, _("Sunday")), + ) + + def __init__(self, *args, **kwargs): + """Initialize multi select with choices weekdays.""" + kwargs["choices"] = self.WEEKDAYS + super().__init__(*args, **kwargs) diff --git a/backend/timed/notifications/__init__.py b/backend/timed/notifications/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/timed/notifications/factories.py b/backend/timed/notifications/factories.py new file mode 100644 index 000000000..6cade3d2b --- /dev/null +++ b/backend/timed/notifications/factories.py @@ -0,0 +1,12 @@ +from factory import Faker, SubFactory +from factory.django import DjangoModelFactory + +from timed.notifications.models import Notification + + +class NotificationFactory(DjangoModelFactory): + project = SubFactory("timed.projects.factories.ProjectFactory") + notification_type = Faker("word", ext_word_list=Notification.NOTIFICATION_TYPES) + + class Meta: + model = Notification diff --git a/backend/timed/notifications/management/commands/budget_check.py b/backend/timed/notifications/management/commands/budget_check.py new file mode 100644 index 000000000..0a4442cbb --- /dev/null +++ b/backend/timed/notifications/management/commands/budget_check.py @@ -0,0 +1,98 @@ +from datetime import timedelta + +import redminelib +from django.conf import settings +from django.core.management.base import BaseCommand +from django.db.models import Sum +from django.template.loader import get_template +from django.utils.timezone import now + +from timed.notifications.models import Notification +from timed.projects.models import Project +from timed.tracking.models import Report + +template = get_template("budget_reminder.txt", using="text") + + +class Command(BaseCommand): + help = "Check budget of a project and update corresponding 
Redmine Project." + + def handle(self, *args, **options): + redmine = redminelib.Redmine( + settings.REDMINE_URL, + key=settings.REDMINE_APIKEY, + ) + + projects = ( + Project.objects.filter( + archived=False, + cost_center__name__contains=settings.BUILD_PROJECTS, + redmine_project__isnull=False, + estimated_time__isnull=False, + estimated_time__gt=timedelta(hours=0), + ) + .exclude(notifications__notification_type=Notification.BUDGET_CHECK_70) + .order_by("name") + ) + + for project in projects.iterator(): + billable_hours = ( + Report.objects.filter(task__project=project, not_billable=False) + .aggregate(billable_hours=Sum("duration")) + .get("billable_hours") + ) + + if not billable_hours: + continue + + billable_hours = billable_hours.total_seconds() / 3600 + estimated_hours = project.estimated_time.total_seconds() / 3600 + budget_percentage = billable_hours / estimated_hours + + if budget_percentage <= 0.3: + continue + try: + issue = redmine.issue.get(project.redmine_project.issue_id) + except redminelib.exceptions.ResourceNotFoundError: + self.stdout.write( + self.style.ERROR( + f"Project {project.name} has an invalid Redmine issue {project.redmine_project.issue_id} assigned. Skipping." + ) + ) + continue + + notification, _ = Notification.objects.get_or_create( + notification_type=Notification.BUDGET_CHECK_30 + if budget_percentage <= 0.7 + else Notification.BUDGET_CHECK_70, + project=project, + ) + + if notification.sent_at: + self.stdout.write( + self.style.NOTICE( + f"Notification {notification.notification_type} for Project {project.name} already sent. Skipping." 
+ ) + ) + continue + + issue.notes = template.render( + { + "estimated_time": estimated_hours, + "billable_hours": billable_hours, + "budget_percentage": 30 + if notification.notification_type == Notification.BUDGET_CHECK_30 + else 70, + } + ) + + try: + issue.save() + notification.sent_at = now() + notification.save() + except redminelib.exceptions.BaseRedmineError: # pragma: no cover + self.stdout.write( + self.style.ERROR( + f"Cannot reach Redmine server! Failed to save Redmine issue {issue.id} and notification {notification}" + ) + ) diff --git a/backend/timed/notifications/management/commands/notify_changed_employments.py b/backend/timed/notifications/management/commands/notify_changed_employments.py new file mode 100644 index 000000000..3a57878da --- /dev/null +++ b/backend/timed/notifications/management/commands/notify_changed_employments.py @@ -0,0 +1,64 @@ +from datetime import timedelta + +from django.conf import settings +from django.core.mail import EmailMessage +from django.core.management.base import BaseCommand +from django.template.loader import get_template +from django.utils import timezone + +from timed.employment.models import Employment +from timed.notifications.models import Notification + +template = get_template("mail/notify_changed_employments.txt", using="text") + + +class Command(BaseCommand): + """ + Notify given email address on changed employments. + + Notifications will be sent when there are employments + which changed in given last days. + """ + + help = "Send notification on given email address on changed employments." 
+ + def add_arguments(self, parser): + parser.add_argument( + "--email", + type=str, + dest="email", + help="Email address notification is sent to.", + ) + parser.add_argument( + "--last-days", + default=7, + type=int, + dest="last_days", + help="Time frame of last days employment changed.", + ) + + def handle(self, *args, **options): + email = options["email"] + last_days = options["last_days"] + + # today is excluded + end = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0) + start = end - timedelta(days=last_days) + + employments = Employment.objects.filter(updated__range=[start, end]) + if employments.exists(): + from_email = settings.DEFAULT_FROM_EMAIL + subject = "[Timed] Employments changed in last {0} days".format(last_days) + body = template.render({"employments": employments}) + message = EmailMessage( + subject=subject, + body=body, + from_email=from_email, + to=[email], + headers=settings.EMAIL_EXTRA_HEADERS, + ) + message.send() + Notification.objects.create( + notification_type=Notification.CHANGED_EMPLOYMENT, + sent_at=timezone.now(), + ) diff --git a/backend/timed/notifications/management/commands/notify_reviewers_unverified.py b/backend/timed/notifications/management/commands/notify_reviewers_unverified.py new file mode 100644 index 000000000..a1ea535f7 --- /dev/null +++ b/backend/timed/notifications/management/commands/notify_reviewers_unverified.py @@ -0,0 +1,184 @@ +from datetime import date, timedelta + +from dateutil.relativedelta import relativedelta +from django.conf import settings +from django.contrib.auth import get_user_model +from django.core.mail import EmailMessage, get_connection +from django.core.management.base import BaseCommand +from django.db.models import Q +from django.template.loader import get_template +from django.utils.timezone import now + +from timed.notifications.models import Notification +from timed.projects.models import CustomerAssignee, ProjectAssignee, TaskAssignee +from timed.tracking.models 
import Report + +template = get_template("mail/notify_reviewers_unverified.txt", using="text") + + +class Command(BaseCommand): + """ + Notify reviewers of projects with unverified reports. + + Notifications will be sent when reviewer has projects with reports + which are unverified in given time frame. + + Example how it works: + + We have set following options + + Today = Friday 4/8/2017 + Months = 1 + Offset = 5 + + with these set reports would be checked between 1/7/2017 and 31/7/2017. + A notification will be sent to reviewers if there are reports on + projects where they are added as reviewer. + """ + + help = "Notify reviewers of projects with unverified reports." + + def add_arguments(self, parser): + parser.add_argument( + "--months", + default=1, + type=int, + dest="months", + help="Number of months to check unverified reports in.", + ) + parser.add_argument( + "--offset", + default=5, + type=int, + dest="offset", + help="Period will end today minus given offset.", + ) + parser.add_argument( + "--message", + default="", + type=str, + dest="message", + help="Additional message to send if there are unverified reports", + ) + parser.add_argument( + "--cc", + action="append", + dest="cc", + help="List of email addresses where to send a cc", + ) + + def handle(self, *args, **options): + months = options["months"] + offset = options["offset"] + message = options["message"] + cc = options["cc"] + + today = date.today() + # -1 as we also skip today + end = today - timedelta(days=offset - 1) + # -1 days as first day of month is needed + start = end - relativedelta(months=months, days=-1) + + reports = self._get_unverified_reports(start, end) + self._notify_reviewers(start, end, reports, message, cc) + + def _get_unverified_reports(self, start, end): + """ + Get unverified reports. + + Unverified reports are reports on project which have a reviewer + assigned but are not verified in given time frame. 
+ """ + return Report.objects.filter(date__range=[start, end], verified_by__isnull=True) + + def _notify_reviewers(self, start, end, reports, optional_message, cc): + """Notify reviewers on their unverified reports. + + Only the reviewers lowest in the hierarchy should be notified. + If a project has a project assignee and a task assignee with reviewer role, + then only the task assignee should be notified about unverified reports. + """ + User = get_user_model() + reviewers = User.objects.all_reviewers().filter(email__isnull=False) + subject = "[Timed] Verification of reports" + from_email = settings.DEFAULT_FROM_EMAIL + connection = get_connection() + messages = [] + + for reviewer in reviewers: + # unverified reports in which user is customer assignee and responsible reviewer + reports_customer_assignee_is_reviewer = reports.filter( + Q( + task__project__customer_id__in=CustomerAssignee.objects.filter( + is_reviewer=True, user_id=reviewer + ).values("customer_id") + ) + ).exclude( + Q( + task__project_id__in=ProjectAssignee.objects.filter( + is_reviewer=True + ).values("project_id") + ) + | Q( + task_id__in=TaskAssignee.objects.filter(is_reviewer=True).values( + "task_id" + ) + ) + ) + + # unverified reports in which user is project assignee and responsible reviewer + reports_project_assignee_is_reviewer = reports.filter( + Q( + task__project_id__in=ProjectAssignee.objects.filter( + is_reviewer=True, user_id=reviewer + ).values("project_id") + ) + ).exclude( + Q( + task_id__in=TaskAssignee.objects.filter(is_reviewer=True).values( + "task_id" + ) + ) + ) + + # unverified reports in which user task assignee and responsible reviewer + reports_task_assignee_is_reviewer = reports.filter( + Q( + task_id__in=TaskAssignee.objects.filter( + is_reviewer=True, user_id=reviewer + ).values("task_id") + ) + ) + if ( + reports_customer_assignee_is_reviewer + | reports_project_assignee_is_reviewer + | reports_task_assignee_is_reviewer + ).exists(): + body = template.render( + { 
+ # we need start and end date in system format + "start": str(start), + "end": str(end), + "message": optional_message, + "reviewer": reviewer, + "protocol": settings.HOST_PROTOCOL, + "domain": settings.HOST_DOMAIN, + } + ) + + message = EmailMessage( + subject=subject, + body=body, + from_email=from_email, + to=[reviewer.email], + cc=cc, + connection=connection, + headers=settings.EMAIL_EXTRA_HEADERS, + ) + + messages.append(message) + if len(messages) > 0: + connection.send_messages(messages) + Notification.objects.create( + notification_type=Notification.REVIEWER_UNVERIFIED, sent_at=now() + ) diff --git a/backend/timed/notifications/management/commands/notify_supervisors_shorttime.py b/backend/timed/notifications/management/commands/notify_supervisors_shorttime.py new file mode 100644 index 000000000..1cd0d20ff --- /dev/null +++ b/backend/timed/notifications/management/commands/notify_supervisors_shorttime.py @@ -0,0 +1,153 @@ +from datetime import date, timedelta + +from django.conf import settings +from django.contrib.auth import get_user_model +from django.core.mail import EmailMessage, get_connection +from django.core.management.base import BaseCommand +from django.template.loader import get_template +from django.utils.timezone import now + +from timed.notifications.models import Notification + +template = get_template("mail/notify_supervisor_shorttime.txt", using="text") + + +class Command(BaseCommand): + """ + Send notification when supervisees have shorttime in given time frame. + + Example how it works: + + We have set following options + + Today = Thursday 27/7/2017 + Days = 7 + Offset = 5 + Ratio = 0.9 + + with these set shorttime would be checked between 17/7/2017 and 23/7/2017. + A notification will be sent to supervisors if ratio between reported and + expected worktime is lower than 90%. + """ + + help = "Notify supervisors when supervisees have reported shortime." 
+ + def add_arguments(self, parser): + parser.add_argument( + "--days", + default=7, + type=int, + dest="days", + help="Length of period to check shorttime in", + ) + parser.add_argument( + "--offset", + default=5, + type=int, + dest="offset", + help="Period will end today minus given offset.", + ) + parser.add_argument( + "--ratio", + default=0.9, + type=float, + dest="ratio", + help=( + "Ratio between expected and reported time " + "before it is considered shorttime" + ), + ) + + def handle(self, *args, **options): + days = options["days"] + offset = options["offset"] + ratio = options["ratio"] + + today = date.today() + # -1 as we also skip today + end = today - timedelta(days=offset - 1) + start = end - timedelta(days=days - 1) + + supervisees = self._get_supervisees_with_shorttime(start, end, ratio) + self._notify_supervisors(start, end, ratio, supervisees) + + def _decimal_hours(self, duration): + return duration.total_seconds() / 3600 + + def _get_supervisees_with_shorttime(self, start, end, ratio): + """ + Get supervisees which reported less hours than they should have. + + :return: dict mapping all supervisees with shorttime with dict of + reported, expected, delta, actual ratio and balance. 
+ """ + supervisees_shorttime = {} + supervisees = get_user_model().objects.all_supervisees() + + start_year = date(end.year, 1, 1) + + for supervisee in supervisees: + worktime = supervisee.calculate_worktime(start, end) + reported, expected, delta = worktime + if expected == timedelta(0): + continue + + supervisee_ratio = reported / expected + if supervisee_ratio < ratio: + supervisees_shorttime[supervisee.id] = { + "reported": self._decimal_hours(reported), + "expected": self._decimal_hours(expected), + "delta": self._decimal_hours(delta), + "ratio": supervisee_ratio, + "balance": self._decimal_hours( + supervisee.calculate_worktime(start_year, end)[2] + ), + } + + return supervisees_shorttime + + def _notify_supervisors(self, start, end, ratio, supervisees): + """ + Notify supervisors about their supervisees. + + :param supervisees: dict whereas key is id of supervisee and + value as a worktime dict of + reported, expected, delta, ratio and balance + """ + supervisors = get_user_model().objects.all_supervisors() + subject = "[Timed] Report supervisees with shorttime" + from_email = settings.DEFAULT_FROM_EMAIL + mails = [] + + for supervisor in supervisors: + suspects = supervisor.supervisees.filter( + id__in=supervisees.keys() + ).order_by("first_name") + suspects_shorttime = [ + (suspect, supervisees[suspect.id]) for suspect in suspects + ] + if suspects.count() > 0 and supervisor.email: + body = template.render( + { + "start": start, + "end": end, + "ratio": ratio, + "suspects": suspects_shorttime, + } + ) + mails.append( + EmailMessage( + subject=subject, + body=body, + from_email=from_email, + to=[supervisor.email], + headers=settings.EMAIL_EXTRA_HEADERS, + ) + ) + + if len(mails) > 0: + connection = get_connection() + connection.send_messages(mails) + Notification.objects.create( + notification_type=Notification.SUPERVISORS_SHORTTIME, sent_at=now() + ) diff --git a/backend/timed/notifications/migrations/0001_initial.py 
b/backend/timed/notifications/migrations/0001_initial.py new file mode 100644 index 000000000..3ee6417e1 --- /dev/null +++ b/backend/timed/notifications/migrations/0001_initial.py @@ -0,0 +1,49 @@ +# Generated by Django 3.2.16 on 2022-11-30 08:38 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + initial = True + + dependencies = [ + ("projects", "0015_remaining_effort_task_project"), + ] + + operations = [ + migrations.CreateModel( + name="Notification", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("sent_at", models.DateTimeField(null=True)), + ( + "notification_type", + models.CharField( + choices=[ + ("budget_check_30", "project budget exceeded 30%"), + ("budget_check_70", "project budget exceeded 70%"), + ], + max_length=50, + ), + ), + ( + "project", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="notifications", + to="projects.project", + ), + ), + ], + ), + ] diff --git a/backend/timed/notifications/migrations/0002_alter_notification_notification_type.py b/backend/timed/notifications/migrations/0002_alter_notification_notification_type.py new file mode 100644 index 000000000..e93e5f0a8 --- /dev/null +++ b/backend/timed/notifications/migrations/0002_alter_notification_notification_type.py @@ -0,0 +1,30 @@ +# Generated by Django 3.2.16 on 2022-12-12 10:08 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("notifications", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="notification", + name="notification_type", + field=models.CharField( + choices=[ + ("budget_check_30", "project budget exceeded 30%"), + ("budget_check_70", "project budget exceeded 70%"), + ("changed_employment", "recently changed employment"), + ("reviewers_unverified", "reviewer has reports to 
verify"), + ( + "supervisors_shorttime", + "supervisor has supervisees with short time", + ), + ("notify_accountants", "notify accountats"), + ], + max_length=50, + ), + ), + ] diff --git a/backend/timed/notifications/migrations/__init__.py b/backend/timed/notifications/migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/timed/notifications/models.py b/backend/timed/notifications/models.py new file mode 100644 index 000000000..00339cebd --- /dev/null +++ b/backend/timed/notifications/models.py @@ -0,0 +1,34 @@ +from django.db import models + +from timed.projects.models import Project + + +class Notification(models.Model): + BUDGET_CHECK_30 = "budget_check_30" + BUDGET_CHECK_70 = "budget_check_70" + CHANGED_EMPLOYMENT = "changed_employment" + REVIEWER_UNVERIFIED = "reviewers_unverified" + SUPERVISORS_SHORTTIME = "supervisors_shorttime" + NOTIFY_ACCOUNTANTS = "notify_accountants" + + NOTIFICATION_TYPE_CHOICES = [ + (BUDGET_CHECK_30, "project budget exceeded 30%"), + (BUDGET_CHECK_70, "project budget exceeded 70%"), + (CHANGED_EMPLOYMENT, "recently changed employment"), + (REVIEWER_UNVERIFIED, "reviewer has reports to verify"), + (SUPERVISORS_SHORTTIME, "supervisor has supervisees with short time"), + (NOTIFY_ACCOUNTANTS, "notify accountats"), + ] + + NOTIFICATION_TYPES = [n for n, _ in NOTIFICATION_TYPE_CHOICES] + + sent_at = models.DateTimeField(null=True) + project = models.ForeignKey( + Project, on_delete=models.CASCADE, null=True, related_name="notifications" + ) + notification_type = models.CharField( + max_length=50, choices=NOTIFICATION_TYPE_CHOICES + ) + + def __str__(self): + return f"Notification: {self.get_notification_type_display()}, id: {self.pk}" diff --git a/backend/timed/notifications/notify_admin.py b/backend/timed/notifications/notify_admin.py new file mode 100644 index 000000000..e3a25b934 --- /dev/null +++ b/backend/timed/notifications/notify_admin.py @@ -0,0 +1,62 @@ +import datetime + +from django.conf 
import settings +from django.core.mail import EmailMultiAlternatives, get_connection +from django.template.loader import get_template, render_to_string +from django.utils.timezone import now + +from timed.notifications.models import Notification + + +def prepare_and_send_email(project, order_duration): + template_txt = get_template("notify_accountants_order.txt") + from_email = settings.DEFAULT_FROM_EMAIL + connection = get_connection() + messages = [] + + customer = project.customer + + duration = order_duration.split(":") + hours = int(duration[0]) + minutes = int(duration[1]) + hours_added = datetime.timedelta(hours=hours, minutes=minutes) + + hours_total = hours_added + if project.estimated_time is not None: + hours_total += project.estimated_time + + subject = f"Customer Center Credits/Reports: {customer.name} has ordered {hours_added} hours." + + body_txt = template_txt.render( + { + "customer": customer, + "project": project, + "hours_added": hours_added, + "hours_total": hours_total, + } + ) + body_html = render_to_string( + "notify_accountants_order.html", + { + "customer": customer, + "project": project, + "hours_added": hours_added, + "hours_total": hours_total, + }, + ) + + message = EmailMultiAlternatives( + subject=subject, + body=body_txt, + from_email=from_email, + to=[settings.CUSTOMER_CENTER_EMAIL], + connection=connection, + headers=settings.EMAIL_EXTRA_HEADERS, + ) + message.attach_alternative(body_html, "text/html") + + messages.append(message) + connection.send_messages(messages) + Notification.objects.create( + notification_type=Notification.REVIEWER_UNVERIFIED, sent_at=now() + ) diff --git a/backend/timed/notifications/templates/budget_reminder.txt b/backend/timed/notifications/templates/budget_reminder.txt new file mode 100644 index 000000000..50f6be3a6 --- /dev/null +++ b/backend/timed/notifications/templates/budget_reminder.txt @@ -0,0 +1,8 @@ +``` +## Project exceeded {{budget_percentage}}% of budget + +- Billable Hours: 
{{billable_hours}} +- Budget: {{estimated_time}} + +To PM: Please check the remaining effort estimate. If more budget is needed, reach out to relevant stakeholders. +``` \ No newline at end of file diff --git a/backend/timed/notifications/templates/mail/notify_changed_employments.txt b/backend/timed/notifications/templates/mail/notify_changed_employments.txt new file mode 100644 index 000000000..c0ff000b2 --- /dev/null +++ b/backend/timed/notifications/templates/mail/notify_changed_employments.txt @@ -0,0 +1,3 @@ +Changed employments: +{% for employment in employments %} +{{employment.start_date}} - {{employment.end_date|ljust:10}} {{employment.percentage|stringformat:"s"|add:'%'|rjust:4}} {{employment.user.get_full_name}}{% endfor %} diff --git a/backend/timed/notifications/templates/mail/notify_reviewers_unverified.txt b/backend/timed/notifications/templates/mail/notify_reviewers_unverified.txt new file mode 100644 index 000000000..2cdb22328 --- /dev/null +++ b/backend/timed/notifications/templates/mail/notify_reviewers_unverified.txt @@ -0,0 +1,5 @@ +There are unverified reports which need your attention. 
+ +{{message}} + +Go to <{{protocol}}://{{domain}}/analysis?fromDate={{start}}&toDate={{end}}&reviewer={{reviewer.id}}&editable=1&verified=0> diff --git a/backend/timed/notifications/templates/mail/notify_supervisor_shorttime.txt b/backend/timed/notifications/templates/mail/notify_supervisor_shorttime.txt new file mode 100644 index 000000000..469f95fde --- /dev/null +++ b/backend/timed/notifications/templates/mail/notify_supervisor_shorttime.txt @@ -0,0 +1,7 @@ +{% load humanize %} +Time range: {{start}} - {{end}} +Ratio: {{ratio}} + +{% for suspect, worktime in suspects %} +{{suspect.get_full_name}} {{worktime.reported}}/{{worktime.expected}} (Ratio {{worktime.ratio|floatformat:2}} Delta {{worktime.delta}} Balance {{worktime.balance}}) +{% endfor %} diff --git a/backend/timed/notifications/tests/test_budget_check.py b/backend/timed/notifications/tests/test_budget_check.py new file mode 100644 index 000000000..0b4b45693 --- /dev/null +++ b/backend/timed/notifications/tests/test_budget_check.py @@ -0,0 +1,123 @@ +import datetime + +import pytest +from django.core.management import call_command +from django.utils.timezone import now +from redminelib.exceptions import ResourceNotFoundError + +from timed.notifications.factories import NotificationFactory +from timed.notifications.models import Notification +from timed.redmine.models import RedmineProject + + +@pytest.mark.parametrize( + "duration, percentage_exceeded, notification_count", + [(1, 0, 0), (3, 0, 0), (4, 30, 1), (8, 70, 2), (0, 0, 0)], +) +def test_budget_check_1( + db, mocker, report_factory, duration, percentage_exceeded, notification_count +): + """Test budget check.""" + redmine_instance = mocker.MagicMock() + issue = mocker.MagicMock() + redmine_instance.issue.get.return_value = issue + redmine_class = mocker.patch("redminelib.Redmine") + redmine_class.return_value = redmine_instance + + report = report_factory(duration=datetime.timedelta(hours=duration)) + project = report.task.project + 
project.estimated_time = datetime.timedelta(hours=10) + project.save() + project.cost_center.name = "DEV_BUILD" + project.cost_center.save() + + if duration == 0: + report.delete() + + if percentage_exceeded == 70: + NotificationFactory( + project=project, notification_type=Notification.BUDGET_CHECK_30 + ) + + report_hours = report.duration.total_seconds() / 3600 + estimated_hours = project.estimated_time.total_seconds() / 3600 + RedmineProject.objects.create(project=project, issue_id=1000) + + call_command("budget_check") + + if percentage_exceeded: + redmine_instance.issue.get.assert_called_once_with(1000) + assert f"Project exceeded {percentage_exceeded}%" in issue.notes + assert f"Billable Hours: {report_hours}" in issue.notes + assert f"Budget: {estimated_hours}\n" in issue.notes + + issue.save.assert_called_once_with() + assert Notification.objects.all().count() == notification_count + + +def test_budget_check_skip_notification(db, capsys, mocker, report_factory): + redmine_instance = mocker.MagicMock() + issue = mocker.MagicMock() + redmine_instance.issue.get.return_value = issue + redmine_class = mocker.patch("redminelib.Redmine") + redmine_class.return_value = redmine_instance + + report = report_factory(duration=datetime.timedelta(hours=5)) + project = report.task.project + project.estimated_time = datetime.timedelta(hours=10) + project.save() + project.cost_center.name = "DEV_BUILD" + project.cost_center.save() + + notification = NotificationFactory( + project=project, notification_type=Notification.BUDGET_CHECK_30, sent_at=now() + ) + + RedmineProject.objects.create(project=project, issue_id=1000) + + call_command("budget_check") + + out, _ = capsys.readouterr() + assert ( + f"Notification {notification.notification_type} for Project {project.name} already sent. 
Skipping" + in out + ) + + +def test_budget_check_no_estimated_timed(db, mocker, capsys, report_factory): + redmine_instance = mocker.MagicMock() + issue = mocker.MagicMock() + redmine_instance.issue.get.return_value = issue + redmine_class = mocker.patch("redminelib.Redmine") + redmine_class.return_value = redmine_instance + + report = report_factory() + project = report.task.project + project.estimated_time = datetime.timedelta(hours=0) + project.save() + project.cost_center.name = "DEV_BUILD" + project.cost_center.save() + RedmineProject.objects.create(project=report.task.project, issue_id=1000) + + call_command("budget_check") + + assert Notification.objects.count() == 0 + + +def test_budget_check_invalid_issue(db, mocker, capsys, report_factory): + redmine_instance = mocker.MagicMock() + redmine_class = mocker.patch("redminelib.Redmine") + redmine_class.return_value = redmine_instance + redmine_instance.issue.get.side_effect = ResourceNotFoundError() + + report = report_factory(duration=datetime.timedelta(hours=4)) + report.task.project.estimated_time = datetime.timedelta(hours=10) + report.task.project.save() + report.task.project.cost_center.name = "DEV_BUILD" + report.task.project.cost_center.save() + RedmineProject.objects.create(project=report.task.project, issue_id=1000) + + call_command("budget_check") + + out, _ = capsys.readouterr() + assert "issue 1000 assigned" in out diff --git a/backend/timed/notifications/tests/test_notify_changed_employments.py b/backend/timed/notifications/tests/test_notify_changed_employments.py new file mode 100644 index 000000000..d901bcd1e --- /dev/null +++ b/backend/timed/notifications/tests/test_notify_changed_employments.py @@ -0,0 +1,31 @@ +from datetime import date + +from django.core.management import call_command + +from timed.employment.factories import EmploymentFactory +from timed.notifications.models import Notification + + +def test_notify_changed_employments(db, mailoutbox, freezer): + email = 
"test@example.net" + + # employments changed too far in the past + freezer.move_to("2017-08-27") + EmploymentFactory.create_batch(2) + + # employments which should show up in report + freezer.move_to("2017-09-03") + finished = EmploymentFactory.create(end_date=date(2017, 10, 10), percentage=80) + new = EmploymentFactory.create(percentage=100) + + freezer.move_to("2017-09-04") + call_command("notify_changed_employments", email=email) + + # checks + assert len(mailoutbox) == 1 + mail = mailoutbox[0] + assert mail.to == [email] + print(mail.body) + assert "80% {0}".format(finished.user.get_full_name()) in mail.body + assert "None 100% {0}".format(new.user.get_full_name()) in mail.body + assert Notification.objects.all().count() == 1 diff --git a/backend/timed/notifications/tests/test_notify_reviewers_unverified.py b/backend/timed/notifications/tests/test_notify_reviewers_unverified.py new file mode 100644 index 000000000..b008997da --- /dev/null +++ b/backend/timed/notifications/tests/test_notify_reviewers_unverified.py @@ -0,0 +1,122 @@ +from datetime import date + +import pytest +from django.core.management import call_command + +from timed.employment.factories import UserFactory +from timed.notifications.models import Notification +from timed.projects.factories import ( + ProjectAssigneeFactory, + ProjectFactory, + TaskAssigneeFactory, + TaskFactory, +) +from timed.tracking.factories import ReportFactory + + +@pytest.mark.freeze_time("2017-8-4") +@pytest.mark.parametrize( + "cc,message", + [ + ("", ""), + ("example@example.com", ""), + ("example@example.com", "This is a test"), + ("", "This is a test"), + ], +) +def test_notify_reviewers_with_cc_and_message(db, mailoutbox, cc, message): + """Test time range 2017-7-1 till 2017-7-31.""" + # a reviewer which will be notified + reviewer_work = UserFactory.create() + project_work = ProjectFactory.create() + ProjectAssigneeFactory.create( + user=reviewer_work, project=project_work, is_reviewer=True + ) + task_work = 
TaskFactory.create(project=project_work) + ReportFactory.create(date=date(2017, 7, 1), task=task_work, verified_by=None) + + # a reviewer which doesn't have any unverfied reports + reviewer_no_work = UserFactory.create() + project_no_work = ProjectFactory.create() + ProjectAssigneeFactory.create( + user=reviewer_no_work, project=project_no_work, is_reviewer=True + ) + task_no_work = TaskFactory.create(project=project_no_work) + ReportFactory.create( + date=date(2017, 7, 1), task=task_no_work, verified_by=reviewer_no_work + ) + + call_command( + "notify_reviewers_unverified", + "--cc={0}".format(cc), + "--message={0}".format(message), + ) + + # checks + assert len(mailoutbox) == 1 + mail = mailoutbox[0] + assert mail.to == [reviewer_work.email] + url = ( + "http://localhost:4200/analysis?fromDate=2017-07-01&" + "toDate=2017-07-31&reviewer=%d&editable=1" + ) % reviewer_work.id + assert url in mail.body + assert message in mail.body + assert mail.cc[0] == cc + + +@pytest.mark.freeze_time("2017-8-4") +def test_notify_reviewers(db, mailoutbox): + """Test time range 2017-7-1 till 2017-7-31.""" + # a reviewer which will be notified + reviewer_work = UserFactory.create() + project_work = ProjectFactory.create() + ProjectAssigneeFactory.create( + user=reviewer_work, project=project_work, is_reviewer=True + ) + task_work = TaskFactory.create(project=project_work) + ReportFactory.create(date=date(2017, 7, 1), task=task_work, verified_by=None) + + call_command("notify_reviewers_unverified") + + # checks + assert len(mailoutbox) == 1 + mail = mailoutbox[0] + assert mail.to == [reviewer_work.email] + url = ( + "http://localhost:4200/analysis?fromDate=2017-07-01&" + "toDate=2017-07-31&reviewer=%d&editable=1" + ) % reviewer_work.id + assert url in mail.body + assert Notification.objects.count() == 1 + + +@pytest.mark.freeze_time("2017-8-4") +def test_notify_reviewers_reviewer_hierarchy(db, mailoutbox): + """Test notification with reviewer hierarchy. 
+ + Test if only the lowest in reviewer hierarchy gets notified. + """ + # user that shouldn't be notified + project_reviewer = UserFactory.create() + # user that should be notified + task_reviewer = UserFactory.create() + project = ProjectFactory.create() + task = TaskFactory.create(project=project) + ProjectAssigneeFactory.create( + user=project_reviewer, project=project, is_reviewer=True + ) + TaskAssigneeFactory.create(user=task_reviewer, task=task, is_reviewer=True) + + ReportFactory.create(date=date(2017, 7, 1), task=task, verified_by=None) + + call_command("notify_reviewers_unverified") + + assert len(mailoutbox) == 1 + mail = mailoutbox[0] + assert mail.to == [task_reviewer.email] + url = ( + "http://localhost:4200/analysis?fromDate=2017-07-01&" + "toDate=2017-07-31&reviewer=%d&editable=1" + ) % task_reviewer.id + assert url in mail.body diff --git a/backend/timed/notifications/tests/test_notify_supervisors_shorttime.py b/backend/timed/notifications/tests/test_notify_supervisors_shorttime.py new file mode 100644 index 000000000..e6c001742 --- /dev/null +++ b/backend/timed/notifications/tests/test_notify_supervisors_shorttime.py @@ -0,0 +1,60 @@ +from datetime import date, timedelta + +import pytest +from dateutil.rrule import DAILY, FR, MO, rrule +from django.core.management import call_command + +from timed.employment.factories import EmploymentFactory, UserFactory +from timed.notifications.models import Notification +from timed.projects.factories import TaskFactory +from timed.tracking.factories import ReportFactory + + +@pytest.mark.freeze_time("2017-7-27") +def test_notify_supervisors(db, mailoutbox): + """Test time range 2017-7-17 till 2017-7-23.""" + start = date(2017, 7, 14) + # supervisee with short time + supervisee = UserFactory.create() + supervisor = UserFactory.create() + supervisee.supervisors.add(supervisor) + + EmploymentFactory.create(user=supervisee, start_date=start, percentage=100) + workdays = rrule( + DAILY, + dtstart=start, + 
until=date.today(), + # range is excluding last + byweekday=range(MO.weekday, FR.weekday + 1), + ) + task = TaskFactory.create() + for dt in workdays: + ReportFactory.create( + user=supervisee, date=dt, task=task, duration=timedelta(hours=7) + ) + + call_command("notify_supervisors_shorttime") + + # checks + assert len(mailoutbox) == 1 + mail = mailoutbox[0] + assert mail.to == [supervisor.email] + body = mail.body + assert "Time range: July 17, 2017 - July 23, 2017\nRatio: 0.9" in body + expected = ("{0} 35.0/42.5 (Ratio 0.82 Delta -7.5 Balance -9.0)").format( + supervisee.get_full_name() + ) + assert expected in body + assert Notification.objects.count() == 1 + + +def test_notify_supervisors_no_employment(db, mailoutbox): + """Check that supervisees without employment do not notify supervisor.""" + supervisee = UserFactory.create() + supervisor = UserFactory.create() + supervisee.supervisors.add(supervisor) + + call_command("notify_supervisors_shorttime") + + assert len(mailoutbox) == 0 + assert Notification.objects.count() == 0 diff --git a/backend/timed/permissions.py b/backend/timed/permissions.py new file mode 100644 index 000000000..3db7e9168 --- /dev/null +++ b/backend/timed/permissions.py @@ -0,0 +1,348 @@ +# from django.utils import timezone +from datetime import date + +from django.db.models import Q +from rest_framework.permissions import SAFE_METHODS, BasePermission, IsAuthenticated + +from timed.employment import models as employment_models +from timed.projects import models as projects_models +from timed.tracking import models as tracking_models + + +class IsUnverified(BasePermission): + """Allows access only to verified objects.""" + + def has_object_permission(self, request, view, obj): + return obj.verified_by_id is None + + +class IsReadOnly(BasePermission): + """Allows read only methods.""" + + def has_permission(self, request, view): + return request.method in SAFE_METHODS + + def has_object_permission(self, request, view, obj): + return 
self.has_permission(request, view) + + +class IsDeleteOnly(BasePermission): + """Allows only delete method.""" + + def has_permission(self, request, view): + return request.method == "DELETE" + + def has_object_permission(self, request, view, obj): + return self.has_permission(request, view) + + +class IsNotDelete(BasePermission): + """Disallow delete method.""" + + def has_permission(self, request, view): + return request.method != "DELETE" + + def has_object_permission(self, request, view, obj): + return self.has_permission(request, view) + + +class IsCreateOnly(BasePermission): + """Allows only create method.""" + + def has_permission(self, request, view): + return request.method == "POST" + + def has_object_permission(self, request, view, obj): + return self.has_permission(request, view) + + +class IsUpdateOnly(BasePermission): + """Allows only update method.""" + + def has_permission(self, request, view): + return request.method in ["PATCH", "PUT"] + + def has_object_permission(self, request, view, obj): + return self.has_permission(request, view) + + +class IsAuthenticated(IsAuthenticated): + """ + Support mixing permission IsAuthenticated with object permission. + + This is needed to use IsAuthenticated with rest condition and or + operator. 
+ """ + + def has_object_permission(self, request, view, obj): + return self.has_permission(request, view) + + +class IsOwner(IsAuthenticated): + """Allows access to object only to owners.""" + + def has_object_permission(self, request, view, obj): + if not super().has_object_permission(request, view, obj): # pragma: no cover + return False + + return obj.user_id == request.user.id + + +class IsSupervisor(IsAuthenticated): + """Allows access to object only to supervisors.""" + + def has_permission(self, request, view): + if not super().has_permission(request, view): # pragma: no cover + return False + + return request.user.supervisees.exists() + + def has_object_permission(self, request, view, obj): + if not super().has_object_permission(request, view, obj): # pragma: no cover + return False + + return request.user.supervisees.filter(id=obj.user_id).exists() + + +class IsReviewer(IsAuthenticated): + """Allows access to object only to reviewers.""" + + def has_permission(self, request, view): + if not super().has_permission(request, view): # pragma: no cover + return False + + if ( + request.user.customer_assignees.filter(is_reviewer=True).exists() + or request.user.project_assignees.filter(is_reviewer=True).exists() + or request.user.task_assignees.filter(is_reviewer=True).exists() + ): + return True + return False + + def has_object_permission(self, request, view, obj): + if not super().has_object_permission(request, view, obj): # pragma: no cover + return False + + user = request.user + + if isinstance(obj, tracking_models.Report): + task = obj.task + else: # pragma: no cover + raise RuntimeError("IsReviewer permission called on unsupported model") + return ( + projects_models.Task.objects.filter(pk=task.pk) + .filter( + Q(task_assignees__user=user, task_assignees__is_reviewer=True) + | Q( + project__project_assignees__user=user, + project__project_assignees__is_reviewer=True, + ) + | Q( + project__customer__customer_assignees__user=user, + 
project__customer__customer_assignees__is_reviewer=True, + ) + ) + .exists() + ) + + +class IsSuperUser(IsAuthenticated): + """Allows access only to superuser.""" + + def has_permission(self, request, view): + if not super().has_permission(request, view): # pragma: no cover + return False + + return request.user.is_superuser + + def has_object_permission(self, request, view, obj): + return self.has_permission(request, view) + + +class IsNotTransferred(IsAuthenticated): + """Allows access only to not transferred objects.""" + + def has_object_permission(self, request, view, obj): + return not obj.transferred + + +class IsInternal(IsAuthenticated): + """Allows access only to internal employees.""" + + def has_permission(self, request, view): + if not super().has_permission(request, view): # pragma: no cover + return False + + employment = request.user.get_active_employment() + if employment: + return not employment.is_external + return False + + def has_object_permission(self, request, view, obj): + if not super().has_object_permission(request, view, obj): # pragma: no cover + return False + + employment = employment_models.Employment.objects.get_at( + user=request.user, date=date.today() + ) + if employment: + return not employment.is_external + return False # pragma: no cover + + +class IsExternal(IsAuthenticated): + """Allows access only to external employees.""" + + def has_permission(self, request, view): + if not super().has_permission(request, view): # pragma: no cover + return False + + employment = request.user.get_active_employment() + if employment: + return employment.is_external + return False + + def has_object_permission(self, request, view, obj): + if not super().has_object_permission(request, view, obj): # pragma: no cover + return False + + employment = employment_models.Employment.objects.get_at( + user=request.user, date=date.today() + ) + if employment: + return employment.is_external + return False # pragma: no cover + + +class 
IsManager(IsAuthenticated): + """Allows access only to assignees with manager role.""" + + def has_permission(self, request, view): + if not super().has_permission(request, view): # pragma: no cover + return False + + if ( + request.user.customer_assignees.filter(is_manager=True).exists() + or request.user.project_assignees.filter(is_manager=True).exists() + or request.user.task_assignees.filter(is_manager=True).exists() + ): + return True + return False + + def has_object_permission(self, request, view, obj): + if not super().has_object_permission(request, view, obj): # pragma: no cover + return False + + user = request.user + + if isinstance(obj, projects_models.Task): + return ( + projects_models.Task.objects.filter(pk=obj.pk) + .filter( + Q(task_assignees__user=user, task_assignees__is_manager=True) + | Q( + project__project_assignees__user=user, + project__project_assignees__is_manager=True, + ) + | Q( + project__customer__customer_assignees__user=user, + project__customer__customer_assignees__is_manager=True, + ) + ) + .exists() + ) + elif isinstance(obj, projects_models.Project): + return ( + projects_models.Project.objects.filter(pk=obj.pk) + .filter( + Q( + tasks__task_assignees__user=user, + tasks__task_assignees__is_manager=True, + ) + | Q( + project_assignees__user=user, + project_assignees__is_manager=True, + ) + | Q( + customer__customer_assignees__user=user, + customer__customer_assignees__is_manager=True, + ) + ) + .exists() + ) + else: # pragma: no cover + raise RuntimeError("IsManager permission called on unsupported model") + + +class IsResource(IsAuthenticated): + """Allows access only to assignees with resource role.""" + + def has_permission(self, request, view): + if not super().has_permission(request, view): # pragma: no cover + return False + + if ( + request.user.customer_assignees.filter(is_resource=True).exists() + or request.user.project_assignees.filter(is_resource=True).exists() + or 
request.user.task_assignees.filter(is_resource=True).exists() + ): + return True + return False + + def has_object_permission(self, request, view, obj): + if not super().has_object_permission(request, view, obj): # pragma: no cover + return False + + user = request.user + + if isinstance(obj, tracking_models.Activity) or isinstance( + obj, tracking_models.Report + ): + if obj.task: + return ( + projects_models.Task.objects.filter(pk=obj.task.pk) + .filter( + Q(task_assignees__user=user, task_assignees__is_resource=True) + | Q( + project__project_assignees__user=user, + project__project_assignees__is_resource=True, + ) + | Q( + project__customer__customer_assignees__user=user, + project__customer__customer_assignees__is_resource=True, + ) + ) + .exists() + ) + else: # pragma: no cover + return True + else: # pragma: no cover + raise RuntimeError("IsResource permission called on unsupported model") + + +class IsAccountant(IsAuthenticated): + """Allows access only to accountants.""" + + def has_permission(self, request, view): + if not super().has_permission(request, view): # pragma: no cover + return False + + return request.user.is_accountant + + def has_object_permission(self, request, view, obj): + if not super().has_object_permission(request, view, obj): # pragma: no cover + return False + + return request.user.is_accountant + + +class IsCustomer(IsAuthenticated): + """Allows access only to assignees with customer role.""" + + def has_permission(self, request, view): + if not super().has_permission(request, view): # pragma: no cover + return False + + if request.user.customer_assignees.filter(is_customer=True).exists(): + return True + return False diff --git a/backend/timed/projects/__init__.py b/backend/timed/projects/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/timed/projects/admin.py b/backend/timed/projects/admin.py new file mode 100644 index 000000000..a1de3a669 --- /dev/null +++ b/backend/timed/projects/admin.py @@ -0,0 
+1,130 @@ +"""Views for the admin interface.""" + +from django import forms +from django.contrib import admin +from django.forms.models import BaseInlineFormSet +from django.utils.translation import gettext_lazy as _ +from nested_inline.admin import NestedModelAdmin, NestedStackedInline + +from timed.forms import DurationInHoursField +from timed.projects import models +from timed.redmine.admin import RedmineProjectInline +from timed.subscription.admin import CustomerPasswordInline + + +class CustomerAssigneeInline(admin.TabularInline): + autocomplete_fields = ["user"] + model = models.CustomerAssignee + extra = 0 + + +class ProjectAssigneeInline(NestedStackedInline): + autocomplete_fields = ["user"] + model = models.ProjectAssignee + extra = 0 + + +class TaskAssigneeInline(NestedStackedInline): + autocomplete_fields = ["user"] + model = models.TaskAssignee + extra = 1 + + +@admin.register(models.Customer) +class CustomerAdmin(admin.ModelAdmin): + """Customer admin view.""" + + list_display = ["name"] + search_fields = ["name"] + inlines = [CustomerPasswordInline, CustomerAssigneeInline] + + def has_delete_permission(self, request, obj=None): + return obj and not obj.projects.exists() + + +@admin.register(models.BillingType) +class BillingType(admin.ModelAdmin): + list_display = ["name"] + search_fields = ["name"] + + +@admin.register(models.CostCenter) +class CostCenter(admin.ModelAdmin): + list_display = ["name", "reference"] + search_fields = ["name"] + + +class TaskForm(forms.ModelForm): + """ + Task form making sure that initial forms are marked as changed. + + Otherwise when saving project default tasks would not be saved. 
+ """ + + model = models.Task + estimated_time = DurationInHoursField( + label=_("Estimated time in hours"), required=False + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + initial = kwargs.get("initial") + if initial: + self.changed_data = ["name"] + + +class TaskInlineFormset(BaseInlineFormSet): + """Task formset defaulting to task templates when project is created.""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + project = kwargs["instance"] + if project.tasks.count() == 0: + self.initial = [ + {"name": tmpl.name} + for tmpl in models.TaskTemplate.objects.order_by("name") + ] + self.extra += len(self.initial) + + +class TaskInline(NestedStackedInline): + formset = TaskInlineFormset + form = TaskForm + model = models.Task + extra = 0 + inlines = [TaskAssigneeInline] + + def has_delete_permission(self, request, obj=None): + # for some reason obj is parent object and not task + # so this doesn't work + # return obj and not obj.reports.exists() + return False + + +class ProjectForm(forms.ModelForm): + model = models.Project + estimated_time = DurationInHoursField( + label=_("Estimated time in hours"), required=False + ) + + +@admin.register(models.Project) +class ProjectAdmin(NestedModelAdmin): + """Project admin view.""" + + form = ProjectForm + list_display = ["name", "customer"] + list_filter = ["customer"] + search_fields = ["name", "customer__name"] + + inlines = [TaskInline, RedmineProjectInline, ProjectAssigneeInline] + + def has_delete_permission(self, request, obj=None): + return obj and not obj.tasks.exists() + + +@admin.register(models.TaskTemplate) +class TaskTemplateAdmin(admin.ModelAdmin): + """Task template admin view.""" + + list_display = ["name"] diff --git a/backend/timed/projects/apps.py b/backend/timed/projects/apps.py new file mode 100644 index 000000000..7959e1769 --- /dev/null +++ b/backend/timed/projects/apps.py @@ -0,0 +1,10 @@ +"""Configuration for projects app.""" + +from 
django.apps import AppConfig + + +class ProjectsConfig(AppConfig): + """App configuration for projects app.""" + + name = "timed.projects" + label = "projects" diff --git a/backend/timed/projects/factories.py b/backend/timed/projects/factories.py new file mode 100644 index 000000000..2febbb3ac --- /dev/null +++ b/backend/timed/projects/factories.py @@ -0,0 +1,130 @@ +"""Factories for testing the projects app.""" + +from factory import Faker, SubFactory +from factory.django import DjangoModelFactory + +from timed.projects import models + + +class CustomerFactory(DjangoModelFactory): + """Customer factory.""" + + name = Faker("company") + email = Faker("company_email") + website = Faker("url") + comment = Faker("sentence") + archived = False + + class Meta: + """Meta informations for the customer factory.""" + + model = models.Customer + + +class BillingTypeFactory(DjangoModelFactory): + name = Faker("currency_name") + reference = None + + class Meta: + model = models.BillingType + + +class CostCenterFactory(DjangoModelFactory): + name = Faker("job") + reference = None + + class Meta: + model = models.CostCenter + + +class ProjectFactory(DjangoModelFactory): + """Project factory.""" + + name = Faker("catch_phrase") + estimated_time = Faker("time_delta") + archived = False + billed = False + customer_visible = False + comment = Faker("sentence") + customer = SubFactory("timed.projects.factories.CustomerFactory") + cost_center = SubFactory("timed.projects.factories.CostCenterFactory") + billing_type = SubFactory("timed.projects.factories.BillingTypeFactory") + remaining_effort_tracking = False + amount_offered = Faker("pydecimal", positive=True, left_digits=4, right_digits=2) + amount_invoiced = Faker("pydecimal", positive=True, left_digits=4, right_digits=2) + + class Meta: + """Meta informations for the project factory.""" + + model = models.Project + + +class TaskFactory(DjangoModelFactory): + """Task factory.""" + + name = Faker("company_suffix") + estimated_time = 
Faker("time_delta") + archived = False + project = SubFactory("timed.projects.factories.ProjectFactory") + cost_center = SubFactory("timed.projects.factories.CostCenterFactory") + + class Meta: + """Meta informations for the task factory.""" + + model = models.Task + + +class TaskTemplateFactory(DjangoModelFactory): + """Task template factory.""" + + name = Faker("sentence") + + class Meta: + """Meta informations for the task template factory.""" + + model = models.TaskTemplate + + +class CustomerAssigneeFactory(DjangoModelFactory): + """CustomerAssignee factory.""" + + user = SubFactory("timed.employment.factories.UserFactory") + customer = SubFactory("timed.projects.factories.CustomerFactory") + is_resource = False + is_reviewer = False + is_manager = False + + class Meta: + """Meta informations for the task template factory.""" + + model = models.CustomerAssignee + + +class ProjectAssigneeFactory(DjangoModelFactory): + """ProjectAssignee factory.""" + + user = SubFactory("timed.employment.factories.UserFactory") + project = SubFactory("timed.projects.factories.ProjectFactory") + is_resource = False + is_reviewer = False + is_manager = False + + class Meta: + """Meta informations for the task template factory.""" + + model = models.ProjectAssignee + + +class TaskAssigneeFactory(DjangoModelFactory): + """CustomerAssignee factory.""" + + user = SubFactory("timed.employment.factories.UserFactory") + task = SubFactory("timed.projects.factories.TaskFactory") + is_resource = False + is_reviewer = False + is_manager = False + + class Meta: + """Meta informations for the task template factory.""" + + model = models.TaskAssignee diff --git a/backend/timed/projects/filters.py b/backend/timed/projects/filters.py new file mode 100644 index 000000000..a26c29713 --- /dev/null +++ b/backend/timed/projects/filters.py @@ -0,0 +1,167 @@ +"""Filters for filtering the data of the projects app endpoints.""" +from datetime import date, timedelta + +from django.db.models import Count, 
Q +from django_filters.constants import EMPTY_VALUES +from django_filters.rest_framework import BaseInFilter, Filter, FilterSet, NumberFilter + +from timed.projects import models + + +class NumberInFilter(BaseInFilter, NumberFilter): + pass + + +class CustomerFilterSet(FilterSet): + """Filter set for the customers endpoint.""" + + archived = NumberFilter(field_name="archived") + + class Meta: + """Meta information for the customer filter set.""" + + model = models.Customer + fields = ["archived", "reference"] + + +class ProjectFilterSet(FilterSet): + """Filter set for the projects endpoint.""" + + archived = NumberFilter(field_name="archived") + has_manager = NumberFilter(method="filter_has_manager") + has_reviewer = NumberFilter(method="filter_has_reviewer") + customer = NumberInFilter(field_name="customer") + + def filter_has_manager(self, queryset, name, value): + if not value: # pragma: no cover + return queryset + return queryset.filter( + Q( + pk__in=models.ProjectAssignee.objects.filter( + is_manager=True, user_id=value + ).values("project_id"), + ) + | Q( + customer_id__in=models.CustomerAssignee.objects.filter( + is_manager=True, user_id=value + ).values("customer_id"), + ) + ) + + def filter_has_reviewer(self, queryset, name, value): + if not value: # pragma: no cover + return queryset + return queryset.filter( + Q( + pk__in=models.ProjectAssignee.objects.filter( + is_reviewer=True, user_id=value + ).values("project_id"), + ) + | Q( + customer_id__in=models.CustomerAssignee.objects.filter( + is_reviewer=True, user_id=value + ).values("customer_id"), + ) + ) + + class Meta: + """Meta information for the project filter set.""" + + model = models.Project + fields = ["archived", "customer", "billing_type", "cost_center", "reference"] + + +class MyMostFrequentTaskFilter(Filter): + """Filter most frequently used tasks. 
+ + TODO: + From an api and framework standpoint instead of an additional filter it + would be more desirable to assign an ordering field frecency and to + limit by use paging. This is way harder to implement therefore on hold. + """ + + def filter(self, qs, value): + """Filter for given most frequently used tasks. + + Most frequently used tasks are only counted within last + few months as older tasks are not relevant anymore + for today's usage. + + :param QuerySet qs: The queryset to filter + :param int value: number of most frequent items + :return: The filtered queryset + :rtype: QuerySet + """ + if value in EMPTY_VALUES: + return qs + + user = self.parent.request.user + from_date = date.today() - timedelta(days=60) + + qs = qs.filter( + reports__user=user, + reports__date__gt=from_date, + archived=False, + project__archived=False, + ) + qs = qs.annotate(frequency=Count("reports")).order_by("-frequency") + # limit number of results to given value + qs = qs[: int(value)] + + return qs + + +class TaskFilterSet(FilterSet): + """Filter set for the tasks endpoint.""" + + my_most_frequent = MyMostFrequentTaskFilter() + archived = NumberFilter(field_name="archived") + project = NumberInFilter(field_name="project") + + class Meta: + """Meta information for the task filter set.""" + + model = models.Task + fields = ["archived", "project", "my_most_frequent", "reference", "cost_center"] + + +class TaskAssigneeFilterSet(FilterSet): + """Filter set for the task assignees endpoint.""" + + task = NumberFilter(field_name="task") + tasks = NumberInFilter(field_name="task") + user = NumberFilter(field_name="user") + + class Meta: + """Meta information for the task assignee filter set.""" + + model = models.TaskAssignee + fields = ["task", "user", "is_reviewer", "is_manager", "is_resource"] + + +class ProjectAssigneeFilterSet(FilterSet): + """Filter set for the project assignees endpoint.""" + + project = NumberFilter(field_name="project") + projects = 
NumberInFilter(field_name="project") + user = NumberFilter(field_name="user") + + class Meta: + """Meta information for the project assignee filter set.""" + + model = models.ProjectAssignee + fields = ["project", "user", "is_reviewer", "is_manager", "is_resource"] + + +class CustomerAssigneeFilterSet(FilterSet): + """Filter set for the customer assignees endpoint.""" + + customer = NumberFilter(field_name="customer") + customers = NumberInFilter(field_name="customer") + user = NumberFilter(field_name="user") + + class Meta: + """Meta information for the customer assignee filter set.""" + + model = models.CustomerAssignee + fields = ["customer", "user", "is_reviewer", "is_manager", "is_resource"] diff --git a/backend/timed/projects/migrations/0001_initial.py b/backend/timed/projects/migrations/0001_initial.py new file mode 100644 index 000000000..08453f60e --- /dev/null +++ b/backend/timed/projects/migrations/0001_initial.py @@ -0,0 +1,150 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.4 on 2017-08-17 09:16 +from __future__ import unicode_literals + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + initial = True + + dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)] + + operations = [ + migrations.CreateModel( + name="BillingType", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=255, unique=True)), + ], + ), + migrations.CreateModel( + name="Customer", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=255)), + ("email", models.EmailField(blank=True, max_length=254)), + ("website", models.URLField(blank=True)), + ("comment", models.TextField(blank=True)), + ("archived", 
models.BooleanField(default=False)), + ], + ), + migrations.CreateModel( + name="Project", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=255)), + ("comment", models.TextField(blank=True)), + ("archived", models.BooleanField(default=False)), + ("estimated_hours", models.PositiveIntegerField(blank=True, null=True)), + ( + "billing_type", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="projects", + to="projects.BillingType", + ), + ), + ( + "customer", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="projects", + to="projects.Customer", + ), + ), + ( + "reviewers", + models.ManyToManyField( + related_name="reviews", to=settings.AUTH_USER_MODEL + ), + ), + ], + ), + migrations.CreateModel( + name="Task", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=255)), + ("estimated_hours", models.PositiveIntegerField(blank=True, null=True)), + ("archived", models.BooleanField(default=False)), + ( + "project", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="tasks", + to="projects.Project", + ), + ), + ], + ), + migrations.CreateModel( + name="TaskTemplate", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=255)), + ], + ), + migrations.AddIndex( + model_name="customer", + index=models.Index( + fields=["name", "archived"], name="projects_cu_name_e0e97a_idx" + ), + ), + migrations.AddIndex( + model_name="task", + index=models.Index( + fields=["name", "archived"], name="projects_ta_name_dd9620_idx" + ), + ), + migrations.AddIndex( + model_name="project", + index=models.Index( + 
fields=["name", "archived"], name="projects_pr_name_ac60a8_idx" + ), + ), + ] diff --git a/backend/timed/projects/migrations/0002_auto_20170823_1045.py b/backend/timed/projects/migrations/0002_auto_20170823_1045.py new file mode 100644 index 000000000..4e2852516 --- /dev/null +++ b/backend/timed/projects/migrations/0002_auto_20170823_1045.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.4 on 2017-08-23 08:45 +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [("projects", "0001_initial")] + + operations = [ + migrations.AlterField( + model_name="project", + name="billing_type", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="projects", + to="projects.BillingType", + ), + ) + ] diff --git a/backend/timed/projects/migrations/0003_auto_20170831_1624.py b/backend/timed/projects/migrations/0003_auto_20170831_1624.py new file mode 100644 index 000000000..2f4f2d06f --- /dev/null +++ b/backend/timed/projects/migrations/0003_auto_20170831_1624.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.4 on 2017-08-31 14:24 +from __future__ import unicode_literals + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [("projects", "0002_auto_20170823_1045")] + + operations = [ + migrations.AlterModelOptions(name="customer", options={"ordering": ["name"]}), + migrations.AlterModelOptions(name="project", options={"ordering": ["name"]}), + migrations.AlterModelOptions(name="task", options={"ordering": ["name"]}), + migrations.AlterModelOptions( + name="tasktemplate", options={"ordering": ["name"]} + ), + ] diff --git a/backend/timed/projects/migrations/0004_auto_20170906_1045.py b/backend/timed/projects/migrations/0004_auto_20170906_1045.py new file mode 100644 index 000000000..7d65e2dc2 --- /dev/null 
+++ b/backend/timed/projects/migrations/0004_auto_20170906_1045.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.4 on 2017-09-06 08:45 +from __future__ import unicode_literals + +from datetime import timedelta + +from django.db import migrations, models + + +def migrate_estimated_hours(apps, schema_editor): + Project = apps.get_model("projects", "Project") + projects = Project.objects.filter(estimated_hours__isnull=False) + for project in projects: + project.estimated_time = timedelta(hours=project.estimated_hours) + project.save() + + Task = apps.get_model("projects", "Task") + tasks = Task.objects.filter(estimated_hours__isnull=False) + for task in tasks: + task.estimated_time = timedelta(hours=task.estimated_hours) + task.save() + + +class Migration(migrations.Migration): + dependencies = [("projects", "0003_auto_20170831_1624")] + + operations = [ + migrations.AddField( + model_name="project", + name="estimated_time", + field=models.DurationField(blank=True, null=True), + ), + migrations.AddField( + model_name="task", + name="estimated_time", + field=models.DurationField(blank=True, null=True), + ), + migrations.RunPython(migrate_estimated_hours), + migrations.RemoveField(model_name="project", name="estimated_hours"), + migrations.RemoveField(model_name="task", name="estimated_hours"), + ] diff --git a/backend/timed/projects/migrations/0005_auto_20170907_0938.py b/backend/timed/projects/migrations/0005_auto_20170907_0938.py new file mode 100644 index 000000000..6e0543f2f --- /dev/null +++ b/backend/timed/projects/migrations/0005_auto_20170907_0938.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.4 on 2017-09-07 07:38 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [("projects", "0004_auto_20170906_1045")] + + operations = [ + migrations.AlterModelOptions( + name="billingtype", options={"ordering": ["name"]} + ), + 
migrations.RemoveIndex( + model_name="customer", name="projects_cu_name_e0e97a_idx" + ), + migrations.RemoveIndex(model_name="task", name="projects_ta_name_dd9620_idx"), + migrations.RemoveIndex( + model_name="project", name="projects_pr_name_ac60a8_idx" + ), + migrations.AddField( + model_name="billingtype", + name="reference", + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AddField( + model_name="customer", + name="reference", + field=models.CharField( + blank=True, db_index=True, max_length=255, null=True + ), + ), + migrations.AddField( + model_name="project", + name="reference", + field=models.CharField( + blank=True, db_index=True, max_length=255, null=True + ), + ), + migrations.AddField( + model_name="task", + name="reference", + field=models.CharField( + blank=True, db_index=True, max_length=255, null=True + ), + ), + migrations.AlterField( + model_name="customer", + name="name", + field=models.CharField(max_length=255, unique=True), + ), + migrations.AlterField( + model_name="project", + name="name", + field=models.CharField(db_index=True, max_length=255), + ), + ] diff --git a/backend/timed/projects/migrations/0006_auto_20171010_1423.py b/backend/timed/projects/migrations/0006_auto_20171010_1423.py new file mode 100644 index 000000000..be0f3e84e --- /dev/null +++ b/backend/timed/projects/migrations/0006_auto_20171010_1423.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.6 on 2017-10-10 12:23 +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [("projects", "0005_auto_20170907_0938")] + + operations = [ + migrations.CreateModel( + name="CostCenter", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=255, unique=True)), + ("reference", 
models.CharField(blank=True, max_length=255, null=True)), + ], + options={"ordering": ["name"]}, + ), + migrations.AddField( + model_name="project", + name="cost_center", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="projects", + to="projects.CostCenter", + ), + ), + migrations.AddField( + model_name="task", + name="cost_center", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="tasks", + to="projects.CostCenter", + ), + ), + ] diff --git a/backend/timed/projects/migrations/0007_project_subscription_project.py b/backend/timed/projects/migrations/0007_project_subscription_project.py new file mode 100644 index 000000000..5d1ccdc64 --- /dev/null +++ b/backend/timed/projects/migrations/0007_project_subscription_project.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.13 on 2018-06-29 13:20 +from __future__ import unicode_literals +from django.db.models import Count + +from django.db import migrations, models + + +def migrate_projects(apps, schema_editor): + """Set subsctition_project on Projects with orders.""" + Project = apps.get_model("projects", "Project") + visible_projects = Project.objects.annotate(count_orders=Count("orders")).filter( + archived=False, count_orders__gt=0 + ) + visible_projects.update(customer_visible=True) + + +class Migration(migrations.Migration): + dependencies = [ + ("projects", "0006_auto_20171010_1423"), + ("subscription", "0003_auto_20170907_1151"), + ] + + operations = [ + migrations.AddField( + model_name="project", + name="customer_visible", + field=models.BooleanField(default=False), + ), + migrations.RunPython(migrate_projects), + ] diff --git a/backend/timed/projects/migrations/0008_auto_20190220_1133.py b/backend/timed/projects/migrations/0008_auto_20190220_1133.py new file mode 100644 index 000000000..1017bccb6 --- /dev/null +++ 
b/backend/timed/projects/migrations/0008_auto_20190220_1133.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.20 on 2019-02-20 10:33 +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [("projects", "0007_project_subscription_project")] + + operations = [ + migrations.AlterField( + model_name="project", + name="billing_type", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + related_name="projects", + to="projects.BillingType", + ), + ), + migrations.AlterField( + model_name="project", + name="cost_center", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + related_name="projects", + to="projects.CostCenter", + ), + ), + ] diff --git a/backend/timed/projects/migrations/0009_auto_20201201_1412.py b/backend/timed/projects/migrations/0009_auto_20201201_1412.py new file mode 100644 index 000000000..522e00f38 --- /dev/null +++ b/backend/timed/projects/migrations/0009_auto_20201201_1412.py @@ -0,0 +1,869 @@ +# Generated by Django 2.2.15 on 2020-12-01 13:12 + +from django.db import migrations +import djmoney.models.fields + + +class Migration(migrations.Migration): + dependencies = [ + ("projects", "0008_auto_20190220_1133"), + ] + + operations = [ + migrations.AddField( + model_name="project", + name="amount_invoiced", + field=djmoney.models.fields.MoneyField( + blank=True, + decimal_places=2, + default_currency="CHF", + max_digits=10, + null=True, + ), + ), + migrations.AddField( + model_name="project", + name="amount_invoiced_currency", + field=djmoney.models.fields.CurrencyField( + choices=[ + ("XUA", "ADB Unit of Account"), + ("AFN", "Afghani"), + ("DZD", "Algerian Dinar"), + ("ARS", "Argentine Peso"), + ("AMD", "Armenian Dram"), + ("AWG", "Aruban Guilder"), + ("AUD", "Australian Dollar"), + ("AZN", "Azerbaijanian 
Manat"), + ("BSD", "Bahamian Dollar"), + ("BHD", "Bahraini Dinar"), + ("THB", "Baht"), + ("PAB", "Balboa"), + ("BBD", "Barbados Dollar"), + ("BYN", "Belarussian Ruble"), + ("BYR", "Belarussian Ruble"), + ("BZD", "Belize Dollar"), + ("BMD", "Bermudian Dollar (customarily known as Bermuda Dollar)"), + ("BTN", "Bhutanese ngultrum"), + ("VEF", "Bolivar Fuerte"), + ("BOB", "Boliviano"), + ("XBA", "Bond Markets Units European Composite Unit (EURCO)"), + ("BRL", "Brazilian Real"), + ("BND", "Brunei Dollar"), + ("BGN", "Bulgarian Lev"), + ("BIF", "Burundi Franc"), + ("XOF", "CFA Franc BCEAO"), + ("XAF", "CFA franc BEAC"), + ("XPF", "CFP Franc"), + ("CAD", "Canadian Dollar"), + ("CVE", "Cape Verde Escudo"), + ("KYD", "Cayman Islands Dollar"), + ("CLP", "Chilean peso"), + ("XTS", "Codes specifically reserved for testing purposes"), + ("COP", "Colombian peso"), + ("KMF", "Comoro Franc"), + ("CDF", "Congolese franc"), + ("BAM", "Convertible Marks"), + ("NIO", "Cordoba Oro"), + ("CRC", "Costa Rican Colon"), + ("HRK", "Croatian Kuna"), + ("CUP", "Cuban Peso"), + ("CUC", "Cuban convertible peso"), + ("CZK", "Czech Koruna"), + ("GMD", "Dalasi"), + ("DKK", "Danish Krone"), + ("MKD", "Denar"), + ("DJF", "Djibouti Franc"), + ("STD", "Dobra"), + ("DOP", "Dominican Peso"), + ("VND", "Dong"), + ("XCD", "East Caribbean Dollar"), + ("EGP", "Egyptian Pound"), + ("SVC", "El Salvador Colon"), + ("ETB", "Ethiopian Birr"), + ("EUR", "Euro"), + ("XBB", "European Monetary Unit (E.M.U.-6)"), + ("XBD", "European Unit of Account 17(E.U.A.-17)"), + ("XBC", "European Unit of Account 9(E.U.A.-9)"), + ("FKP", "Falkland Islands Pound"), + ("FJD", "Fiji Dollar"), + ("HUF", "Forint"), + ("GHS", "Ghana Cedi"), + ("GIP", "Gibraltar Pound"), + ("XAU", "Gold"), + ("XFO", "Gold-Franc"), + ("PYG", "Guarani"), + ("GNF", "Guinea Franc"), + ("GYD", "Guyana Dollar"), + ("HTG", "Haitian gourde"), + ("HKD", "Hong Kong Dollar"), + ("UAH", "Hryvnia"), + ("ISK", "Iceland Krona"), + ("INR", "Indian Rupee"), + ("IRR", 
"Iranian Rial"), + ("IQD", "Iraqi Dinar"), + ("IMP", "Isle of Man Pound"), + ("JMD", "Jamaican Dollar"), + ("JOD", "Jordanian Dinar"), + ("KES", "Kenyan Shilling"), + ("PGK", "Kina"), + ("LAK", "Kip"), + ("KWD", "Kuwaiti Dinar"), + ("AOA", "Kwanza"), + ("MMK", "Kyat"), + ("GEL", "Lari"), + ("LVL", "Latvian Lats"), + ("LBP", "Lebanese Pound"), + ("ALL", "Lek"), + ("HNL", "Lempira"), + ("SLL", "Leone"), + ("LSL", "Lesotho loti"), + ("LRD", "Liberian Dollar"), + ("LYD", "Libyan Dinar"), + ("SZL", "Lilangeni"), + ("LTL", "Lithuanian Litas"), + ("MGA", "Malagasy Ariary"), + ("MWK", "Malawian Kwacha"), + ("MYR", "Malaysian Ringgit"), + ("TMM", "Manat"), + ("MUR", "Mauritius Rupee"), + ("MZN", "Metical"), + ("MXV", "Mexican Unidad de Inversion (UDI)"), + ("MXN", "Mexican peso"), + ("MDL", "Moldovan Leu"), + ("MAD", "Moroccan Dirham"), + ("BOV", "Mvdol"), + ("NGN", "Naira"), + ("ERN", "Nakfa"), + ("NAD", "Namibian Dollar"), + ("NPR", "Nepalese Rupee"), + ("ANG", "Netherlands Antillian Guilder"), + ("ILS", "New Israeli Sheqel"), + ("RON", "New Leu"), + ("TWD", "New Taiwan Dollar"), + ("NZD", "New Zealand Dollar"), + ("KPW", "North Korean Won"), + ("NOK", "Norwegian Krone"), + ("PEN", "Nuevo Sol"), + ("MRO", "Ouguiya"), + ("TOP", "Paanga"), + ("PKR", "Pakistan Rupee"), + ("XPD", "Palladium"), + ("MOP", "Pataca"), + ("PHP", "Philippine Peso"), + ("XPT", "Platinum"), + ("GBP", "Pound Sterling"), + ("BWP", "Pula"), + ("QAR", "Qatari Rial"), + ("GTQ", "Quetzal"), + ("ZAR", "Rand"), + ("OMR", "Rial Omani"), + ("KHR", "Riel"), + ("MVR", "Rufiyaa"), + ("IDR", "Rupiah"), + ("RUB", "Russian Ruble"), + ("RWF", "Rwanda Franc"), + ("XDR", "SDR"), + ("SHP", "Saint Helena Pound"), + ("SAR", "Saudi Riyal"), + ("RSD", "Serbian Dinar"), + ("SCR", "Seychelles Rupee"), + ("XAG", "Silver"), + ("SGD", "Singapore Dollar"), + ("SBD", "Solomon Islands Dollar"), + ("KGS", "Som"), + ("SOS", "Somali Shilling"), + ("TJS", "Somoni"), + ("SSP", "South Sudanese Pound"), + ("LKR", "Sri Lanka Rupee"), + 
("XSU", "Sucre"), + ("SDG", "Sudanese Pound"), + ("SRD", "Surinam Dollar"), + ("SEK", "Swedish Krona"), + ("CHF", "Swiss Franc"), + ("SYP", "Syrian Pound"), + ("BDT", "Taka"), + ("WST", "Tala"), + ("TZS", "Tanzanian Shilling"), + ("KZT", "Tenge"), + ( + "XXX", + "The codes assigned for transactions where no currency is involved", + ), + ("TTD", "Trinidad and Tobago Dollar"), + ("MNT", "Tugrik"), + ("TND", "Tunisian Dinar"), + ("TRY", "Turkish Lira"), + ("TMT", "Turkmenistan New Manat"), + ("TVD", "Tuvalu dollar"), + ("AED", "UAE Dirham"), + ("XFU", "UIC-Franc"), + ("USD", "US Dollar"), + ("USN", "US Dollar (Next day)"), + ("UGX", "Uganda Shilling"), + ("CLF", "Unidad de Fomento"), + ("COU", "Unidad de Valor Real"), + ("UYI", "Uruguay Peso en Unidades Indexadas (URUIURUI)"), + ("UYU", "Uruguayan peso"), + ("UZS", "Uzbekistan Sum"), + ("VUV", "Vatu"), + ("CHE", "WIR Euro"), + ("CHW", "WIR Franc"), + ("KRW", "Won"), + ("YER", "Yemeni Rial"), + ("JPY", "Yen"), + ("CNY", "Yuan Renminbi"), + ("ZMK", "Zambian Kwacha"), + ("ZMW", "Zambian Kwacha"), + ("ZWD", "Zimbabwe Dollar A/06"), + ("ZWN", "Zimbabwe dollar A/08"), + ("ZWL", "Zimbabwe dollar A/09"), + ("PLN", "Zloty"), + ], + default="CHF", + editable=False, + max_length=3, + ), + ), + migrations.AddField( + model_name="project", + name="amount_offered", + field=djmoney.models.fields.MoneyField( + blank=True, + decimal_places=2, + default_currency="CHF", + max_digits=10, + null=True, + ), + ), + migrations.AddField( + model_name="project", + name="amount_offered_currency", + field=djmoney.models.fields.CurrencyField( + choices=[ + ("XUA", "ADB Unit of Account"), + ("AFN", "Afghani"), + ("DZD", "Algerian Dinar"), + ("ARS", "Argentine Peso"), + ("AMD", "Armenian Dram"), + ("AWG", "Aruban Guilder"), + ("AUD", "Australian Dollar"), + ("AZN", "Azerbaijanian Manat"), + ("BSD", "Bahamian Dollar"), + ("BHD", "Bahraini Dinar"), + ("THB", "Baht"), + ("PAB", "Balboa"), + ("BBD", "Barbados Dollar"), + ("BYN", "Belarussian Ruble"), + 
("BYR", "Belarussian Ruble"), + ("BZD", "Belize Dollar"), + ("BMD", "Bermudian Dollar (customarily known as Bermuda Dollar)"), + ("BTN", "Bhutanese ngultrum"), + ("VEF", "Bolivar Fuerte"), + ("BOB", "Boliviano"), + ("XBA", "Bond Markets Units European Composite Unit (EURCO)"), + ("BRL", "Brazilian Real"), + ("BND", "Brunei Dollar"), + ("BGN", "Bulgarian Lev"), + ("BIF", "Burundi Franc"), + ("XOF", "CFA Franc BCEAO"), + ("XAF", "CFA franc BEAC"), + ("XPF", "CFP Franc"), + ("CAD", "Canadian Dollar"), + ("CVE", "Cape Verde Escudo"), + ("KYD", "Cayman Islands Dollar"), + ("CLP", "Chilean peso"), + ("XTS", "Codes specifically reserved for testing purposes"), + ("COP", "Colombian peso"), + ("KMF", "Comoro Franc"), + ("CDF", "Congolese franc"), + ("BAM", "Convertible Marks"), + ("NIO", "Cordoba Oro"), + ("CRC", "Costa Rican Colon"), + ("HRK", "Croatian Kuna"), + ("CUP", "Cuban Peso"), + ("CUC", "Cuban convertible peso"), + ("CZK", "Czech Koruna"), + ("GMD", "Dalasi"), + ("DKK", "Danish Krone"), + ("MKD", "Denar"), + ("DJF", "Djibouti Franc"), + ("STD", "Dobra"), + ("DOP", "Dominican Peso"), + ("VND", "Dong"), + ("XCD", "East Caribbean Dollar"), + ("EGP", "Egyptian Pound"), + ("SVC", "El Salvador Colon"), + ("ETB", "Ethiopian Birr"), + ("EUR", "Euro"), + ("XBB", "European Monetary Unit (E.M.U.-6)"), + ("XBD", "European Unit of Account 17(E.U.A.-17)"), + ("XBC", "European Unit of Account 9(E.U.A.-9)"), + ("FKP", "Falkland Islands Pound"), + ("FJD", "Fiji Dollar"), + ("HUF", "Forint"), + ("GHS", "Ghana Cedi"), + ("GIP", "Gibraltar Pound"), + ("XAU", "Gold"), + ("XFO", "Gold-Franc"), + ("PYG", "Guarani"), + ("GNF", "Guinea Franc"), + ("GYD", "Guyana Dollar"), + ("HTG", "Haitian gourde"), + ("HKD", "Hong Kong Dollar"), + ("UAH", "Hryvnia"), + ("ISK", "Iceland Krona"), + ("INR", "Indian Rupee"), + ("IRR", "Iranian Rial"), + ("IQD", "Iraqi Dinar"), + ("IMP", "Isle of Man Pound"), + ("JMD", "Jamaican Dollar"), + ("JOD", "Jordanian Dinar"), + ("KES", "Kenyan Shilling"), + ("PGK", 
"Kina"), + ("LAK", "Kip"), + ("KWD", "Kuwaiti Dinar"), + ("AOA", "Kwanza"), + ("MMK", "Kyat"), + ("GEL", "Lari"), + ("LVL", "Latvian Lats"), + ("LBP", "Lebanese Pound"), + ("ALL", "Lek"), + ("HNL", "Lempira"), + ("SLL", "Leone"), + ("LSL", "Lesotho loti"), + ("LRD", "Liberian Dollar"), + ("LYD", "Libyan Dinar"), + ("SZL", "Lilangeni"), + ("LTL", "Lithuanian Litas"), + ("MGA", "Malagasy Ariary"), + ("MWK", "Malawian Kwacha"), + ("MYR", "Malaysian Ringgit"), + ("TMM", "Manat"), + ("MUR", "Mauritius Rupee"), + ("MZN", "Metical"), + ("MXV", "Mexican Unidad de Inversion (UDI)"), + ("MXN", "Mexican peso"), + ("MDL", "Moldovan Leu"), + ("MAD", "Moroccan Dirham"), + ("BOV", "Mvdol"), + ("NGN", "Naira"), + ("ERN", "Nakfa"), + ("NAD", "Namibian Dollar"), + ("NPR", "Nepalese Rupee"), + ("ANG", "Netherlands Antillian Guilder"), + ("ILS", "New Israeli Sheqel"), + ("RON", "New Leu"), + ("TWD", "New Taiwan Dollar"), + ("NZD", "New Zealand Dollar"), + ("KPW", "North Korean Won"), + ("NOK", "Norwegian Krone"), + ("PEN", "Nuevo Sol"), + ("MRO", "Ouguiya"), + ("TOP", "Paanga"), + ("PKR", "Pakistan Rupee"), + ("XPD", "Palladium"), + ("MOP", "Pataca"), + ("PHP", "Philippine Peso"), + ("XPT", "Platinum"), + ("GBP", "Pound Sterling"), + ("BWP", "Pula"), + ("QAR", "Qatari Rial"), + ("GTQ", "Quetzal"), + ("ZAR", "Rand"), + ("OMR", "Rial Omani"), + ("KHR", "Riel"), + ("MVR", "Rufiyaa"), + ("IDR", "Rupiah"), + ("RUB", "Russian Ruble"), + ("RWF", "Rwanda Franc"), + ("XDR", "SDR"), + ("SHP", "Saint Helena Pound"), + ("SAR", "Saudi Riyal"), + ("RSD", "Serbian Dinar"), + ("SCR", "Seychelles Rupee"), + ("XAG", "Silver"), + ("SGD", "Singapore Dollar"), + ("SBD", "Solomon Islands Dollar"), + ("KGS", "Som"), + ("SOS", "Somali Shilling"), + ("TJS", "Somoni"), + ("SSP", "South Sudanese Pound"), + ("LKR", "Sri Lanka Rupee"), + ("XSU", "Sucre"), + ("SDG", "Sudanese Pound"), + ("SRD", "Surinam Dollar"), + ("SEK", "Swedish Krona"), + ("CHF", "Swiss Franc"), + ("SYP", "Syrian Pound"), + ("BDT", "Taka"), + 
("WST", "Tala"), + ("TZS", "Tanzanian Shilling"), + ("KZT", "Tenge"), + ( + "XXX", + "The codes assigned for transactions where no currency is involved", + ), + ("TTD", "Trinidad and Tobago Dollar"), + ("MNT", "Tugrik"), + ("TND", "Tunisian Dinar"), + ("TRY", "Turkish Lira"), + ("TMT", "Turkmenistan New Manat"), + ("TVD", "Tuvalu dollar"), + ("AED", "UAE Dirham"), + ("XFU", "UIC-Franc"), + ("USD", "US Dollar"), + ("USN", "US Dollar (Next day)"), + ("UGX", "Uganda Shilling"), + ("CLF", "Unidad de Fomento"), + ("COU", "Unidad de Valor Real"), + ("UYI", "Uruguay Peso en Unidades Indexadas (URUIURUI)"), + ("UYU", "Uruguayan peso"), + ("UZS", "Uzbekistan Sum"), + ("VUV", "Vatu"), + ("CHE", "WIR Euro"), + ("CHW", "WIR Franc"), + ("KRW", "Won"), + ("YER", "Yemeni Rial"), + ("JPY", "Yen"), + ("CNY", "Yuan Renminbi"), + ("ZMK", "Zambian Kwacha"), + ("ZMW", "Zambian Kwacha"), + ("ZWD", "Zimbabwe Dollar A/06"), + ("ZWN", "Zimbabwe dollar A/08"), + ("ZWL", "Zimbabwe dollar A/09"), + ("PLN", "Zloty"), + ], + default="CHF", + editable=False, + max_length=3, + ), + ), + migrations.AddField( + model_name="task", + name="amount_invoiced", + field=djmoney.models.fields.MoneyField( + blank=True, + decimal_places=2, + default_currency="CHF", + max_digits=10, + null=True, + ), + ), + migrations.AddField( + model_name="task", + name="amount_invoiced_currency", + field=djmoney.models.fields.CurrencyField( + choices=[ + ("XUA", "ADB Unit of Account"), + ("AFN", "Afghani"), + ("DZD", "Algerian Dinar"), + ("ARS", "Argentine Peso"), + ("AMD", "Armenian Dram"), + ("AWG", "Aruban Guilder"), + ("AUD", "Australian Dollar"), + ("AZN", "Azerbaijanian Manat"), + ("BSD", "Bahamian Dollar"), + ("BHD", "Bahraini Dinar"), + ("THB", "Baht"), + ("PAB", "Balboa"), + ("BBD", "Barbados Dollar"), + ("BYN", "Belarussian Ruble"), + ("BYR", "Belarussian Ruble"), + ("BZD", "Belize Dollar"), + ("BMD", "Bermudian Dollar (customarily known as Bermuda Dollar)"), + ("BTN", "Bhutanese ngultrum"), + ("VEF", "Bolivar 
Fuerte"), + ("BOB", "Boliviano"), + ("XBA", "Bond Markets Units European Composite Unit (EURCO)"), + ("BRL", "Brazilian Real"), + ("BND", "Brunei Dollar"), + ("BGN", "Bulgarian Lev"), + ("BIF", "Burundi Franc"), + ("XOF", "CFA Franc BCEAO"), + ("XAF", "CFA franc BEAC"), + ("XPF", "CFP Franc"), + ("CAD", "Canadian Dollar"), + ("CVE", "Cape Verde Escudo"), + ("KYD", "Cayman Islands Dollar"), + ("CLP", "Chilean peso"), + ("XTS", "Codes specifically reserved for testing purposes"), + ("COP", "Colombian peso"), + ("KMF", "Comoro Franc"), + ("CDF", "Congolese franc"), + ("BAM", "Convertible Marks"), + ("NIO", "Cordoba Oro"), + ("CRC", "Costa Rican Colon"), + ("HRK", "Croatian Kuna"), + ("CUP", "Cuban Peso"), + ("CUC", "Cuban convertible peso"), + ("CZK", "Czech Koruna"), + ("GMD", "Dalasi"), + ("DKK", "Danish Krone"), + ("MKD", "Denar"), + ("DJF", "Djibouti Franc"), + ("STD", "Dobra"), + ("DOP", "Dominican Peso"), + ("VND", "Dong"), + ("XCD", "East Caribbean Dollar"), + ("EGP", "Egyptian Pound"), + ("SVC", "El Salvador Colon"), + ("ETB", "Ethiopian Birr"), + ("EUR", "Euro"), + ("XBB", "European Monetary Unit (E.M.U.-6)"), + ("XBD", "European Unit of Account 17(E.U.A.-17)"), + ("XBC", "European Unit of Account 9(E.U.A.-9)"), + ("FKP", "Falkland Islands Pound"), + ("FJD", "Fiji Dollar"), + ("HUF", "Forint"), + ("GHS", "Ghana Cedi"), + ("GIP", "Gibraltar Pound"), + ("XAU", "Gold"), + ("XFO", "Gold-Franc"), + ("PYG", "Guarani"), + ("GNF", "Guinea Franc"), + ("GYD", "Guyana Dollar"), + ("HTG", "Haitian gourde"), + ("HKD", "Hong Kong Dollar"), + ("UAH", "Hryvnia"), + ("ISK", "Iceland Krona"), + ("INR", "Indian Rupee"), + ("IRR", "Iranian Rial"), + ("IQD", "Iraqi Dinar"), + ("IMP", "Isle of Man Pound"), + ("JMD", "Jamaican Dollar"), + ("JOD", "Jordanian Dinar"), + ("KES", "Kenyan Shilling"), + ("PGK", "Kina"), + ("LAK", "Kip"), + ("KWD", "Kuwaiti Dinar"), + ("AOA", "Kwanza"), + ("MMK", "Kyat"), + ("GEL", "Lari"), + ("LVL", "Latvian Lats"), + ("LBP", "Lebanese Pound"), + ("ALL", 
"Lek"), + ("HNL", "Lempira"), + ("SLL", "Leone"), + ("LSL", "Lesotho loti"), + ("LRD", "Liberian Dollar"), + ("LYD", "Libyan Dinar"), + ("SZL", "Lilangeni"), + ("LTL", "Lithuanian Litas"), + ("MGA", "Malagasy Ariary"), + ("MWK", "Malawian Kwacha"), + ("MYR", "Malaysian Ringgit"), + ("TMM", "Manat"), + ("MUR", "Mauritius Rupee"), + ("MZN", "Metical"), + ("MXV", "Mexican Unidad de Inversion (UDI)"), + ("MXN", "Mexican peso"), + ("MDL", "Moldovan Leu"), + ("MAD", "Moroccan Dirham"), + ("BOV", "Mvdol"), + ("NGN", "Naira"), + ("ERN", "Nakfa"), + ("NAD", "Namibian Dollar"), + ("NPR", "Nepalese Rupee"), + ("ANG", "Netherlands Antillian Guilder"), + ("ILS", "New Israeli Sheqel"), + ("RON", "New Leu"), + ("TWD", "New Taiwan Dollar"), + ("NZD", "New Zealand Dollar"), + ("KPW", "North Korean Won"), + ("NOK", "Norwegian Krone"), + ("PEN", "Nuevo Sol"), + ("MRO", "Ouguiya"), + ("TOP", "Paanga"), + ("PKR", "Pakistan Rupee"), + ("XPD", "Palladium"), + ("MOP", "Pataca"), + ("PHP", "Philippine Peso"), + ("XPT", "Platinum"), + ("GBP", "Pound Sterling"), + ("BWP", "Pula"), + ("QAR", "Qatari Rial"), + ("GTQ", "Quetzal"), + ("ZAR", "Rand"), + ("OMR", "Rial Omani"), + ("KHR", "Riel"), + ("MVR", "Rufiyaa"), + ("IDR", "Rupiah"), + ("RUB", "Russian Ruble"), + ("RWF", "Rwanda Franc"), + ("XDR", "SDR"), + ("SHP", "Saint Helena Pound"), + ("SAR", "Saudi Riyal"), + ("RSD", "Serbian Dinar"), + ("SCR", "Seychelles Rupee"), + ("XAG", "Silver"), + ("SGD", "Singapore Dollar"), + ("SBD", "Solomon Islands Dollar"), + ("KGS", "Som"), + ("SOS", "Somali Shilling"), + ("TJS", "Somoni"), + ("SSP", "South Sudanese Pound"), + ("LKR", "Sri Lanka Rupee"), + ("XSU", "Sucre"), + ("SDG", "Sudanese Pound"), + ("SRD", "Surinam Dollar"), + ("SEK", "Swedish Krona"), + ("CHF", "Swiss Franc"), + ("SYP", "Syrian Pound"), + ("BDT", "Taka"), + ("WST", "Tala"), + ("TZS", "Tanzanian Shilling"), + ("KZT", "Tenge"), + ( + "XXX", + "The codes assigned for transactions where no currency is involved", + ), + ("TTD", "Trinidad 
and Tobago Dollar"), + ("MNT", "Tugrik"), + ("TND", "Tunisian Dinar"), + ("TRY", "Turkish Lira"), + ("TMT", "Turkmenistan New Manat"), + ("TVD", "Tuvalu dollar"), + ("AED", "UAE Dirham"), + ("XFU", "UIC-Franc"), + ("USD", "US Dollar"), + ("USN", "US Dollar (Next day)"), + ("UGX", "Uganda Shilling"), + ("CLF", "Unidad de Fomento"), + ("COU", "Unidad de Valor Real"), + ("UYI", "Uruguay Peso en Unidades Indexadas (URUIURUI)"), + ("UYU", "Uruguayan peso"), + ("UZS", "Uzbekistan Sum"), + ("VUV", "Vatu"), + ("CHE", "WIR Euro"), + ("CHW", "WIR Franc"), + ("KRW", "Won"), + ("YER", "Yemeni Rial"), + ("JPY", "Yen"), + ("CNY", "Yuan Renminbi"), + ("ZMK", "Zambian Kwacha"), + ("ZMW", "Zambian Kwacha"), + ("ZWD", "Zimbabwe Dollar A/06"), + ("ZWN", "Zimbabwe dollar A/08"), + ("ZWL", "Zimbabwe dollar A/09"), + ("PLN", "Zloty"), + ], + default="CHF", + editable=False, + max_length=3, + ), + ), + migrations.AddField( + model_name="task", + name="amount_offered", + field=djmoney.models.fields.MoneyField( + blank=True, + decimal_places=2, + default_currency="CHF", + max_digits=10, + null=True, + ), + ), + migrations.AddField( + model_name="task", + name="amount_offered_currency", + field=djmoney.models.fields.CurrencyField( + choices=[ + ("XUA", "ADB Unit of Account"), + ("AFN", "Afghani"), + ("DZD", "Algerian Dinar"), + ("ARS", "Argentine Peso"), + ("AMD", "Armenian Dram"), + ("AWG", "Aruban Guilder"), + ("AUD", "Australian Dollar"), + ("AZN", "Azerbaijanian Manat"), + ("BSD", "Bahamian Dollar"), + ("BHD", "Bahraini Dinar"), + ("THB", "Baht"), + ("PAB", "Balboa"), + ("BBD", "Barbados Dollar"), + ("BYN", "Belarussian Ruble"), + ("BYR", "Belarussian Ruble"), + ("BZD", "Belize Dollar"), + ("BMD", "Bermudian Dollar (customarily known as Bermuda Dollar)"), + ("BTN", "Bhutanese ngultrum"), + ("VEF", "Bolivar Fuerte"), + ("BOB", "Boliviano"), + ("XBA", "Bond Markets Units European Composite Unit (EURCO)"), + ("BRL", "Brazilian Real"), + ("BND", "Brunei Dollar"), + ("BGN", "Bulgarian Lev"), 
+ ("BIF", "Burundi Franc"), + ("XOF", "CFA Franc BCEAO"), + ("XAF", "CFA franc BEAC"), + ("XPF", "CFP Franc"), + ("CAD", "Canadian Dollar"), + ("CVE", "Cape Verde Escudo"), + ("KYD", "Cayman Islands Dollar"), + ("CLP", "Chilean peso"), + ("XTS", "Codes specifically reserved for testing purposes"), + ("COP", "Colombian peso"), + ("KMF", "Comoro Franc"), + ("CDF", "Congolese franc"), + ("BAM", "Convertible Marks"), + ("NIO", "Cordoba Oro"), + ("CRC", "Costa Rican Colon"), + ("HRK", "Croatian Kuna"), + ("CUP", "Cuban Peso"), + ("CUC", "Cuban convertible peso"), + ("CZK", "Czech Koruna"), + ("GMD", "Dalasi"), + ("DKK", "Danish Krone"), + ("MKD", "Denar"), + ("DJF", "Djibouti Franc"), + ("STD", "Dobra"), + ("DOP", "Dominican Peso"), + ("VND", "Dong"), + ("XCD", "East Caribbean Dollar"), + ("EGP", "Egyptian Pound"), + ("SVC", "El Salvador Colon"), + ("ETB", "Ethiopian Birr"), + ("EUR", "Euro"), + ("XBB", "European Monetary Unit (E.M.U.-6)"), + ("XBD", "European Unit of Account 17(E.U.A.-17)"), + ("XBC", "European Unit of Account 9(E.U.A.-9)"), + ("FKP", "Falkland Islands Pound"), + ("FJD", "Fiji Dollar"), + ("HUF", "Forint"), + ("GHS", "Ghana Cedi"), + ("GIP", "Gibraltar Pound"), + ("XAU", "Gold"), + ("XFO", "Gold-Franc"), + ("PYG", "Guarani"), + ("GNF", "Guinea Franc"), + ("GYD", "Guyana Dollar"), + ("HTG", "Haitian gourde"), + ("HKD", "Hong Kong Dollar"), + ("UAH", "Hryvnia"), + ("ISK", "Iceland Krona"), + ("INR", "Indian Rupee"), + ("IRR", "Iranian Rial"), + ("IQD", "Iraqi Dinar"), + ("IMP", "Isle of Man Pound"), + ("JMD", "Jamaican Dollar"), + ("JOD", "Jordanian Dinar"), + ("KES", "Kenyan Shilling"), + ("PGK", "Kina"), + ("LAK", "Kip"), + ("KWD", "Kuwaiti Dinar"), + ("AOA", "Kwanza"), + ("MMK", "Kyat"), + ("GEL", "Lari"), + ("LVL", "Latvian Lats"), + ("LBP", "Lebanese Pound"), + ("ALL", "Lek"), + ("HNL", "Lempira"), + ("SLL", "Leone"), + ("LSL", "Lesotho loti"), + ("LRD", "Liberian Dollar"), + ("LYD", "Libyan Dinar"), + ("SZL", "Lilangeni"), + ("LTL", "Lithuanian 
Litas"), + ("MGA", "Malagasy Ariary"), + ("MWK", "Malawian Kwacha"), + ("MYR", "Malaysian Ringgit"), + ("TMM", "Manat"), + ("MUR", "Mauritius Rupee"), + ("MZN", "Metical"), + ("MXV", "Mexican Unidad de Inversion (UDI)"), + ("MXN", "Mexican peso"), + ("MDL", "Moldovan Leu"), + ("MAD", "Moroccan Dirham"), + ("BOV", "Mvdol"), + ("NGN", "Naira"), + ("ERN", "Nakfa"), + ("NAD", "Namibian Dollar"), + ("NPR", "Nepalese Rupee"), + ("ANG", "Netherlands Antillian Guilder"), + ("ILS", "New Israeli Sheqel"), + ("RON", "New Leu"), + ("TWD", "New Taiwan Dollar"), + ("NZD", "New Zealand Dollar"), + ("KPW", "North Korean Won"), + ("NOK", "Norwegian Krone"), + ("PEN", "Nuevo Sol"), + ("MRO", "Ouguiya"), + ("TOP", "Paanga"), + ("PKR", "Pakistan Rupee"), + ("XPD", "Palladium"), + ("MOP", "Pataca"), + ("PHP", "Philippine Peso"), + ("XPT", "Platinum"), + ("GBP", "Pound Sterling"), + ("BWP", "Pula"), + ("QAR", "Qatari Rial"), + ("GTQ", "Quetzal"), + ("ZAR", "Rand"), + ("OMR", "Rial Omani"), + ("KHR", "Riel"), + ("MVR", "Rufiyaa"), + ("IDR", "Rupiah"), + ("RUB", "Russian Ruble"), + ("RWF", "Rwanda Franc"), + ("XDR", "SDR"), + ("SHP", "Saint Helena Pound"), + ("SAR", "Saudi Riyal"), + ("RSD", "Serbian Dinar"), + ("SCR", "Seychelles Rupee"), + ("XAG", "Silver"), + ("SGD", "Singapore Dollar"), + ("SBD", "Solomon Islands Dollar"), + ("KGS", "Som"), + ("SOS", "Somali Shilling"), + ("TJS", "Somoni"), + ("SSP", "South Sudanese Pound"), + ("LKR", "Sri Lanka Rupee"), + ("XSU", "Sucre"), + ("SDG", "Sudanese Pound"), + ("SRD", "Surinam Dollar"), + ("SEK", "Swedish Krona"), + ("CHF", "Swiss Franc"), + ("SYP", "Syrian Pound"), + ("BDT", "Taka"), + ("WST", "Tala"), + ("TZS", "Tanzanian Shilling"), + ("KZT", "Tenge"), + ( + "XXX", + "The codes assigned for transactions where no currency is involved", + ), + ("TTD", "Trinidad and Tobago Dollar"), + ("MNT", "Tugrik"), + ("TND", "Tunisian Dinar"), + ("TRY", "Turkish Lira"), + ("TMT", "Turkmenistan New Manat"), + ("TVD", "Tuvalu dollar"), + ("AED", "UAE 
Dirham"), + ("XFU", "UIC-Franc"), + ("USD", "US Dollar"), + ("USN", "US Dollar (Next day)"), + ("UGX", "Uganda Shilling"), + ("CLF", "Unidad de Fomento"), + ("COU", "Unidad de Valor Real"), + ("UYI", "Uruguay Peso en Unidades Indexadas (URUIURUI)"), + ("UYU", "Uruguayan peso"), + ("UZS", "Uzbekistan Sum"), + ("VUV", "Vatu"), + ("CHE", "WIR Euro"), + ("CHW", "WIR Franc"), + ("KRW", "Won"), + ("YER", "Yemeni Rial"), + ("JPY", "Yen"), + ("CNY", "Yuan Renminbi"), + ("ZMK", "Zambian Kwacha"), + ("ZMW", "Zambian Kwacha"), + ("ZWD", "Zimbabwe Dollar A/06"), + ("ZWN", "Zimbabwe dollar A/08"), + ("ZWL", "Zimbabwe dollar A/09"), + ("PLN", "Zloty"), + ], + default="CHF", + editable=False, + max_length=3, + ), + ), + ] diff --git a/backend/timed/projects/migrations/0010_project_billed.py b/backend/timed/projects/migrations/0010_project_billed.py new file mode 100644 index 000000000..1448d63fc --- /dev/null +++ b/backend/timed/projects/migrations/0010_project_billed.py @@ -0,0 +1,17 @@ +# Generated by Django 2.2.15 on 2020-12-01 14:51 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("projects", "0009_auto_20201201_1412"), + ] + + operations = [ + migrations.AddField( + model_name="project", + name="billed", + field=models.BooleanField(default=False), + ), + ] diff --git a/backend/timed/projects/migrations/0011_auto_20210419_1459.py b/backend/timed/projects/migrations/0011_auto_20210419_1459.py new file mode 100644 index 000000000..1de4770a6 --- /dev/null +++ b/backend/timed/projects/migrations/0011_auto_20210419_1459.py @@ -0,0 +1,141 @@ +# Generated by Django 3.1.7 on 2021-04-19 12:59 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ("projects", "0010_project_billed"), + ] + + operations = [ + migrations.CreateModel( + 
name="TaskAssignee", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("is_resource", models.BooleanField(default=False)), + ("is_reviewer", models.BooleanField(default=False)), + ("is_manager", models.BooleanField(default=False)), + ( + "task", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="task_assignees", + to="projects.task", + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="task_assignees", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + ), + migrations.CreateModel( + name="ProjectAssignee", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("is_resource", models.BooleanField(default=False)), + ("is_reviewer", models.BooleanField(default=False)), + ("is_manager", models.BooleanField(default=False)), + ( + "project", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="project_assignees", + to="projects.project", + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="project_assignees", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + ), + migrations.CreateModel( + name="CustomerAssignee", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("is_resource", models.BooleanField(default=False)), + ("is_reviewer", models.BooleanField(default=False)), + ("is_manager", models.BooleanField(default=False)), + ( + "customer", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="customer_assignees", + to="projects.customer", + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="customer_assignees", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + ), + 
migrations.AddField( + model_name="customer", + name="assignees", + field=models.ManyToManyField( + related_name="assigned_to_customers", + through="projects.CustomerAssignee", + to=settings.AUTH_USER_MODEL, + ), + ), + migrations.AddField( + model_name="project", + name="assignees", + field=models.ManyToManyField( + related_name="assigned_to_projects", + through="projects.ProjectAssignee", + to=settings.AUTH_USER_MODEL, + ), + ), + migrations.AddField( + model_name="task", + name="assignees", + field=models.ManyToManyField( + related_name="assigned_to_tasks", + through="projects.TaskAssignee", + to=settings.AUTH_USER_MODEL, + ), + ), + ] diff --git a/backend/timed/projects/migrations/0012_migrate_reviewers_to_assignees.py b/backend/timed/projects/migrations/0012_migrate_reviewers_to_assignees.py new file mode 100644 index 000000000..637ebbc54 --- /dev/null +++ b/backend/timed/projects/migrations/0012_migrate_reviewers_to_assignees.py @@ -0,0 +1,25 @@ +from django.db import migrations + + +def migrate_reviewers(apps, schema_editor): + """Migrate reviewers from projects to assignees""" + Project = apps.get_model("projects", "Project") + ProjectAssignee = apps.get_model("projects", "ProjectAssignee") + projects = Project.objects.all() + + for project in projects: + for reviewer in project.reviewers.all(): + project_assignee = ProjectAssignee( + user=reviewer, project=project, is_reviewer=True, is_manager=True + ) + project_assignee.save() + + +class Migration(migrations.Migration): + dependencies = [ + ("projects", "0011_auto_20210419_1459"), + ] + + operations = [ + migrations.RunPython(migrate_reviewers), + ] diff --git a/backend/timed/projects/migrations/0013_remove_project_reviewers.py b/backend/timed/projects/migrations/0013_remove_project_reviewers.py new file mode 100644 index 000000000..146721523 --- /dev/null +++ b/backend/timed/projects/migrations/0013_remove_project_reviewers.py @@ -0,0 +1,16 @@ +# Generated by Django 3.1.7 on 2021-08-03 11:15 + +from 
django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("projects", "0012_migrate_reviewers_to_assignees"), + ] + + operations = [ + migrations.RemoveField( + model_name="project", + name="reviewers", + ), + ] diff --git a/backend/timed/projects/migrations/0014_add_is_customer_role_to_assignees.py b/backend/timed/projects/migrations/0014_add_is_customer_role_to_assignees.py new file mode 100644 index 000000000..2f947a086 --- /dev/null +++ b/backend/timed/projects/migrations/0014_add_is_customer_role_to_assignees.py @@ -0,0 +1,27 @@ +# Generated by Django 3.1.7 on 2021-11-25 14:19 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("projects", "0013_remove_project_reviewers"), + ] + + operations = [ + migrations.AddField( + model_name="customerassignee", + name="is_customer", + field=models.BooleanField(default=False), + ), + migrations.AddField( + model_name="projectassignee", + name="is_customer", + field=models.BooleanField(default=False), + ), + migrations.AddField( + model_name="taskassignee", + name="is_customer", + field=models.BooleanField(default=False), + ), + ] diff --git a/backend/timed/projects/migrations/0015_remaining_effort_task_project.py b/backend/timed/projects/migrations/0015_remaining_effort_task_project.py new file mode 100644 index 000000000..645e12287 --- /dev/null +++ b/backend/timed/projects/migrations/0015_remaining_effort_task_project.py @@ -0,0 +1,28 @@ +# Generated by Django 3.2.13 on 2022-08-04 11:36 + +import datetime +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("projects", "0014_add_is_customer_role_to_assignees"), + ] + + operations = [ + migrations.AddField( + model_name="project", + name="remaining_effort_tracking", + field=models.BooleanField(default=False), + ), + migrations.AddField( + model_name="project", + name="total_remaining_effort", + 
field=models.DurationField(default=datetime.timedelta(0)), + ), + migrations.AddField( + model_name="task", + name="most_recent_remaining_effort", + field=models.DurationField(blank=True, null=True), + ), + ] diff --git a/backend/timed/projects/migrations/__init__.py b/backend/timed/projects/migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/timed/projects/models.py b/backend/timed/projects/models.py new file mode 100644 index 000000000..a8b6acb54 --- /dev/null +++ b/backend/timed/projects/models.py @@ -0,0 +1,279 @@ +"""Models for the projects app.""" + +from datetime import timedelta + +from django.conf import settings +from django.db import models +from django.db.models import Q +from django.db.models.signals import pre_save +from django.dispatch import receiver +from djmoney.models.fields import MoneyField + +from timed.tracking.models import Report + + +class Customer(models.Model): + """Customer model. + + A customer is a person or company which will pay the work + reported on their projects. + """ + + name = models.CharField(max_length=255, unique=True) + reference = models.CharField(max_length=255, db_index=True, blank=True, null=True) + email = models.EmailField(blank=True) + website = models.URLField(blank=True) + comment = models.TextField(blank=True) + archived = models.BooleanField(default=False) + assignees = models.ManyToManyField( + settings.AUTH_USER_MODEL, + through="CustomerAssignee", + related_name="assigned_to_customers", + ) + + def __str__(self): + """Represent the model as a string. 
+ + :return: The string representation + :rtype: str + """ + return self.name + + class Meta: + """Meta informations for the customer model.""" + + ordering = ["name"] + + +class CostCenter(models.Model): + """Cost center defining how cost of projects and tasks are allocated.""" + + name = models.CharField(max_length=255, unique=True) + reference = models.CharField(max_length=255, blank=True, null=True) + + def __str__(self): + return self.name + + class Meta: + ordering = ["name"] + + +class BillingType(models.Model): + """Billing type defining how a project, resp. reports are being billed.""" + + name = models.CharField(max_length=255, unique=True) + reference = models.CharField(max_length=255, blank=True, null=True) + + def __str__(self): + return self.name + + class Meta: + ordering = ["name"] + + +class Project(models.Model): + """Project model. + + A project is an offer in most cases. It has multiple tasks and + belongs to a customer. + """ + + name = models.CharField(max_length=255, db_index=True) + reference = models.CharField(max_length=255, db_index=True, blank=True, null=True) + comment = models.TextField(blank=True) + archived = models.BooleanField(default=False) + billed = models.BooleanField(default=False) + estimated_time = models.DurationField(blank=True, null=True) + customer = models.ForeignKey( + "projects.Customer", on_delete=models.CASCADE, related_name="projects" + ) + billing_type = models.ForeignKey( + BillingType, + on_delete=models.PROTECT, + blank=True, + null=True, + related_name="projects", + ) + cost_center = models.ForeignKey( + CostCenter, + on_delete=models.PROTECT, + blank=True, + null=True, + related_name="projects", + ) + customer_visible = models.BooleanField(default=False) + amount_offered = MoneyField( + max_digits=10, decimal_places=2, default_currency="CHF", blank=True, null=True + ) + amount_invoiced = MoneyField( + max_digits=10, decimal_places=2, default_currency="CHF", blank=True, null=True + ) + assignees = 
models.ManyToManyField( + settings.AUTH_USER_MODEL, + through="ProjectAssignee", + related_name="assigned_to_projects", + ) + remaining_effort_tracking = models.BooleanField(default=False) + total_remaining_effort = models.DurationField(default=timedelta(0)) + + def __str__(self): + """Represent the model as a string. + + :return: The string representation + :rtype: str + """ + return "{0} > {1}".format(self.customer, self.name) + + class Meta: + ordering = ["name"] + + +class Task(models.Model): + """Task model. + + A task is a certain activity type on a project. Users can + report their activities and reports on it. + """ + + name = models.CharField(max_length=255) + reference = models.CharField(max_length=255, db_index=True, blank=True, null=True) + estimated_time = models.DurationField(blank=True, null=True) + archived = models.BooleanField(default=False) + project = models.ForeignKey( + "projects.Project", on_delete=models.CASCADE, related_name="tasks" + ) + cost_center = models.ForeignKey( + CostCenter, + on_delete=models.SET_NULL, + blank=True, + null=True, + related_name="tasks", + ) + amount_offered = MoneyField( + max_digits=10, decimal_places=2, default_currency="CHF", blank=True, null=True + ) + amount_invoiced = MoneyField( + max_digits=10, decimal_places=2, default_currency="CHF", blank=True, null=True + ) + assignees = models.ManyToManyField( + settings.AUTH_USER_MODEL, + through="TaskAssignee", + related_name="assigned_to_tasks", + ) + most_recent_remaining_effort = models.DurationField(blank=True, null=True) + + def __str__(self): + """Represent the model as a string. + + :return: The string representation + :rtype: str + """ + return "{0} > {1}".format(self.project, self.name) + + class Meta: + """Meta informations for the task model.""" + + ordering = ["name"] + + +class TaskTemplate(models.Model): + """Task template model. + + A task template is a global template of a task which should + be generated for every project. 
+ """ + + name = models.CharField(max_length=255) + + def __str__(self): + """Represent the model as a string. + + :return: The string representation + :rtype: str + """ + return self.name + + class Meta: + ordering = ["name"] + + +class CustomerAssignee(models.Model): + """Customer assignee model. + + Customer assignee is an employee that is assigned to a specific customer. + """ + + user = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.CASCADE, + related_name="customer_assignees", + ) + customer = models.ForeignKey( + "projects.Customer", on_delete=models.CASCADE, related_name="customer_assignees" + ) + is_resource = models.BooleanField(default=False) + is_reviewer = models.BooleanField(default=False) + is_manager = models.BooleanField(default=False) + is_customer = models.BooleanField(default=False) + + +class ProjectAssignee(models.Model): + """Project assignee model. + + Project assignee is an employee that is assigned to a specific project. + """ + + user = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.CASCADE, + related_name="project_assignees", + ) + project = models.ForeignKey( + "projects.Project", on_delete=models.CASCADE, related_name="project_assignees" + ) + is_resource = models.BooleanField(default=False) + is_reviewer = models.BooleanField(default=False) + is_manager = models.BooleanField(default=False) + is_customer = models.BooleanField(default=False) + + +class TaskAssignee(models.Model): + """Task assignee model. + + Task assignee is an employee that is assigned to a specific task. 
+ """ + + user = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.CASCADE, + related_name="task_assignees", + ) + task = models.ForeignKey( + "projects.Task", on_delete=models.CASCADE, related_name="task_assignees" + ) + is_resource = models.BooleanField(default=False) + is_reviewer = models.BooleanField(default=False) + is_manager = models.BooleanField(default=False) + is_customer = models.BooleanField(default=False) + + +@receiver(pre_save, sender=Project) +def update_billed_flag_on_reports(sender, instance, **kwargs): + """Update billed flag on all reports from the updated project. + + Only update reports if the billed flag on the project was changed. + Setting the billed flag to True on a project in Django Admin should set + all existing reports to billed=True. Same goes for setting the flag to billed=False. + The billed flag should primarily be set in frontend. + This is only a quicker way for the accountants to update all reports at once. + """ + # ignore signal when loading a fixture + if kwargs.get("raw", False): # pragma: no cover + return + + # check whether the project was created or is being updated + if instance.pk: + if instance.billed != Project.objects.get(id=instance.id).billed: + Report.objects.filter(Q(task__project=instance)).update( + billed=instance.billed + ) diff --git a/backend/timed/projects/serializers.py b/backend/timed/projects/serializers.py new file mode 100644 index 000000000..8a79c39c9 --- /dev/null +++ b/backend/timed/projects/serializers.py @@ -0,0 +1,263 @@ +"""Serializers for the projects app.""" +from datetime import timedelta + +from django.db.models import Q, Sum +from django.utils.duration import duration_string +from rest_framework_json_api.relations import ResourceRelatedField +from rest_framework_json_api.serializers import ModelSerializer, ValidationError + +from timed.projects import models +from timed.tracking.models import Report + + +class CustomerSerializer(ModelSerializer): + """Customer 
serializer.""" + + class Meta: + """Meta information for the customer serializer.""" + + model = models.Customer + fields = [ + "name", + "reference", + "email", + "website", + "comment", + "archived", + ] + + +class BillingTypeSerializer(ModelSerializer): + class Meta: + model = models.BillingType + fields = ["name", "reference"] + + +class CostCenterSerializer(ModelSerializer): + class Meta: + model = models.CostCenter + fields = ["name", "reference"] + + +class ProjectSerializer(ModelSerializer): + """Project serializer.""" + + customer = ResourceRelatedField(queryset=models.Customer.objects.all()) + billing_type = ResourceRelatedField(queryset=models.BillingType.objects.all()) + + included_serializers = { + "customer": "timed.projects.serializers.CustomerSerializer", + "billing_type": "timed.projects.serializers.BillingTypeSerializer", + "cost_center": "timed.projects.serializers.CostCenterSerializer", + } + + def get_root_meta(self, resource, many): + if not many: + queryset = Report.objects.filter(task__project=self.instance) + data = queryset.aggregate(spent_time=Sum("duration")) + data["spent_time"] = duration_string(data["spent_time"] or timedelta(0)) + + billable_data = queryset.filter(not_billable=False, review=False).aggregate( + spent_billable=Sum("duration") + ) + data["spent_billable"] = duration_string( + billable_data["spent_billable"] or timedelta(0) + ) + return data + + return {} + + def validate_remaining_effort_tracking(self, value): + user = self.context["request"].user + project = self.instance + if not ( + user.is_superuser + or user.is_accountant + or models.Project.objects.filter( + Q( + project_assignees__user=user, + project_assignees__is_manager=True, + project_assignees__project=project, + ) + | Q( + customer__customer_assignees__user=user, + customer__customer_assignees__is_manager=True, + customer__customer_assignees__customer=project.customer, + ) + ).exists() + ): + raise ValidationError( + "Only managers, accountants and 
superuser may activate remaining effort tracking!" + ) + return value + + class Meta: + """Meta information for the project serializer.""" + + model = models.Project + fields = [ + "name", + "reference", + "comment", + "estimated_time", + "archived", + "billed", + "customer", + "billing_type", + "cost_center", + "customer_visible", + "remaining_effort_tracking", + "total_remaining_effort", + ] + + +class TaskSerializer(ModelSerializer): + """Task serializer.""" + + project = ResourceRelatedField(queryset=models.Project.objects.all()) + + included_serializers = { + "activities": "timed.tracking.serializers.ActivitySerializer", + "project": "timed.projects.serializers.ProjectSerializer", + "cost_center": "timed.projects.serializers.CostCenterSerializer", + } + + def get_root_meta(self, resource, many): + if not many: + queryset = Report.objects.filter(task=self.instance) + data = queryset.aggregate(spent_time=Sum("duration")) + data["spent_time"] = duration_string(data["spent_time"] or timedelta(0)) + return data + + return {} + + def validate(self, data): + """Validate the role of the user. + + Check if the user is a manager on the corresponding + project or customer when he wants to create a new task. + + Check if the user is a manager on the task or + the corresponding project or customer when he wants to update the task. 
+ """ + request = self.context["request"] + user = request.user + # check if user is manager when updating a task + if self.instance: + if ( + user.is_superuser + or models.Task.objects.filter(id=self.instance.id) + .filter( + Q( + task_assignees__user=user, + task_assignees__is_manager=True, + ) + | Q( + project__project_assignees__user=user, + project__project_assignees__is_manager=True, + ) + | Q( + project__customer__customer_assignees__user=user, + project__customer__customer_assignees__is_manager=True, + ) + ) + .exists() + ): + return data + # check if user is manager when creating a task + elif ( + user.is_superuser + or models.Project.objects.filter(pk=data["project"].id) + .filter( + Q( + project_assignees__user=user, + project_assignees__is_manager=True, + ) + | Q( + customer__customer_assignees__user=user, + customer__customer_assignees__is_manager=True, + ) + ) + .exists() + ): + return data + + class Meta: + """Meta information for the task serializer.""" + + model = models.Task + fields = [ + "name", + "reference", + "estimated_time", + "archived", + "project", + "cost_center", + "most_recent_remaining_effort", + ] + + +class CustomerAssigneeSerializer(ModelSerializer): + """Customer assignee serializer.""" + + included_serializers = { + "user": "timed.employment.serializers.UserSerializer", + "customer": "timed.projects.serializers.CustomerSerializer", + } + + class Meta: + """Meta information for the customer assignee serializer.""" + + model = models.CustomerAssignee + fields = [ + "user", + "customer", + "is_reviewer", + "is_manager", + "is_resource", + "is_customer", + ] + + +class ProjectAssigneeSerializer(ModelSerializer): + """Project assignee serializer.""" + + included_serializers = { + "user": "timed.employment.serializers.UserSerializer", + "project": "timed.projects.serializers.ProjectSerializer", + } + + class Meta: + """Meta information for the project assignee serializer.""" + + model = models.ProjectAssignee + fields = [ + "user", + 
"project", + "is_reviewer", + "is_manager", + "is_resource", + "is_customer", + ] + + +class TaskAssigneeSerializer(ModelSerializer): + """Task assignees serializer.""" + + included_serializers = { + "user": "timed.employment.serializers.UserSerializer", + "task": "timed.projects.serializers.TaskSerializer", + } + + class Meta: + """Meta information for the task assignee serializer.""" + + model = models.TaskAssignee + fields = [ + "user", + "task", + "is_reviewer", + "is_manager", + "is_resource", + "is_customer", + ] diff --git a/backend/timed/projects/tests/__init__.py b/backend/timed/projects/tests/__init__.py new file mode 100644 index 000000000..6e031999e --- /dev/null +++ b/backend/timed/projects/tests/__init__.py @@ -0,0 +1 @@ +# noqa: D104 diff --git a/backend/timed/projects/tests/test_billing_type.py b/backend/timed/projects/tests/test_billing_type.py new file mode 100644 index 000000000..f4e9fcb21 --- /dev/null +++ b/backend/timed/projects/tests/test_billing_type.py @@ -0,0 +1,58 @@ +import pytest +from django.urls import reverse +from rest_framework.status import HTTP_200_OK, HTTP_403_FORBIDDEN + +from timed.conftest import setup_customer_and_employment_status +from timed.projects import factories, models + + +@pytest.mark.parametrize( + "is_employed, is_external, is_customer_assignee, is_customer, customer_visible, expected, status_code", + [ + (False, False, True, False, False, 0, HTTP_403_FORBIDDEN), + (False, False, True, True, False, 0, HTTP_200_OK), + (False, False, True, True, True, 1, HTTP_200_OK), + (True, False, False, False, False, 1, HTTP_200_OK), + (True, True, False, False, False, 1, HTTP_403_FORBIDDEN), + (True, False, True, False, False, 1, HTTP_200_OK), + (True, True, True, False, False, 1, HTTP_403_FORBIDDEN), + (True, False, True, True, False, 1, HTTP_200_OK), + (True, True, True, True, False, 0, HTTP_200_OK), + (True, False, True, True, True, 1, HTTP_200_OK), + (True, True, True, True, True, 1, HTTP_200_OK), + ], +) +def 
test_billing_type_list( + auth_client, + is_employed, + is_external, + is_customer_assignee, + is_customer, + customer_visible, + expected, + status_code, +): + user = auth_client.user + setup_customer_and_employment_status( + user=user, + is_assignee=is_customer_assignee, + is_customer=is_customer, + is_employed=is_employed, + is_external=is_external, + ) + if is_customer_assignee: + customer = models.Customer.objects.get(customer_assignees__user=user) + else: + customer = factories.CustomerFactory.create() + project = factories.ProjectFactory.create( + customer_visible=customer_visible, customer=customer + ) + + url = reverse("billing-type-list") + + res = auth_client.get(url) + assert res.status_code == status_code + if res.status_code == HTTP_200_OK and expected: + json = res.json() + assert len(json["data"]) == expected + assert json["data"][0]["id"] == str(project.billing_type.id) diff --git a/backend/timed/projects/tests/test_cost_center.py b/backend/timed/projects/tests/test_cost_center.py new file mode 100644 index 000000000..08d6422ed --- /dev/null +++ b/backend/timed/projects/tests/test_cost_center.py @@ -0,0 +1,39 @@ +import pytest +from django.urls import reverse +from rest_framework.status import HTTP_200_OK, HTTP_403_FORBIDDEN + +from timed.conftest import setup_customer_and_employment_status +from timed.projects.factories import CostCenterFactory + + +@pytest.mark.parametrize( + "is_employed, is_customer_assignee, is_customer, status_code", + [ + (False, True, False, HTTP_403_FORBIDDEN), + (False, True, True, HTTP_403_FORBIDDEN), + (True, False, False, HTTP_200_OK), + (True, True, False, HTTP_200_OK), + (True, True, True, HTTP_200_OK), + ], +) +def test_cost_center_list( + auth_client, is_employed, is_customer_assignee, is_customer, status_code +): + user = auth_client.user + cost_center = CostCenterFactory.create() + setup_customer_and_employment_status( + user=user, + is_assignee=is_customer_assignee, + is_customer=is_customer, + 
is_employed=is_employed, + is_external=False, + ) + + url = reverse("cost-center-list") + + res = auth_client.get(url) + assert res.status_code == status_code + if res.status_code == HTTP_200_OK: + json = res.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(cost_center.id) diff --git a/backend/timed/projects/tests/test_customer.py b/backend/timed/projects/tests/test_customer.py new file mode 100644 index 000000000..6a901a806 --- /dev/null +++ b/backend/timed/projects/tests/test_customer.py @@ -0,0 +1,94 @@ +"""Tests for the customers endpoint.""" + +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.projects.factories import CustomerAssigneeFactory, CustomerFactory + + +def test_customer_list_not_archived(internal_employee_client): + CustomerFactory.create(archived=True) + customer = CustomerFactory.create(archived=False) + + url = reverse("customer-list") + + response = internal_employee_client.get(url, data={"archived": 0}) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(customer.id) + + +def test_customer_detail(internal_employee_client): + customer = CustomerFactory.create() + + url = reverse("customer-detail", args=[customer.id]) + + response = internal_employee_client.get(url) + assert response.status_code == status.HTTP_200_OK + + +def test_customer_create(auth_client): + url = reverse("customer-list") + + response = auth_client.post(url) + assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED + + +def test_customer_update(auth_client): + customer = CustomerFactory.create() + + url = reverse("customer-detail", args=[customer.id]) + + response = auth_client.patch(url) + assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED + + +def test_customer_delete(auth_client): + customer = CustomerFactory.create() + + url = reverse("customer-detail", args=[customer.id]) + + 
response = auth_client.delete(url) + assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED + + +@pytest.mark.parametrize("is_assigned, expected", [(True, 1), (False, 0)]) +def test_customer_list_external_employee( + external_employee_client, is_assigned, expected +): + CustomerFactory.create_batch(4) + customer = CustomerFactory.create() + if is_assigned: + customer.assignees.add(external_employee_client.user) + + url = reverse("customer-list") + + response = external_employee_client.get(url) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == expected + + +@pytest.mark.parametrize( + "is_customer, expected", + [(True, 1), (False, 0)], +) +def test_customer_list_no_employment(auth_client, is_customer, expected): + CustomerFactory.create_batch(4) + customer = CustomerFactory.create() + if is_customer: + CustomerAssigneeFactory.create( + user=auth_client.user, is_customer=True, customer=customer + ) + + url = reverse("customer-list") + + response = auth_client.get(url) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == expected diff --git a/backend/timed/projects/tests/test_customer_assignee.py b/backend/timed/projects/tests/test_customer_assignee.py new file mode 100644 index 000000000..73c7232d3 --- /dev/null +++ b/backend/timed/projects/tests/test_customer_assignee.py @@ -0,0 +1,44 @@ +import pytest +from django.urls import reverse +from rest_framework.status import HTTP_200_OK + +from timed.conftest import setup_customer_and_employment_status +from timed.projects.factories import CustomerAssigneeFactory + + +@pytest.mark.parametrize( + "is_employed, is_external, is_customer_assignee, is_customer, expected", + [ + (False, False, True, False, 0), + (False, False, True, True, 0), + (True, True, False, False, 0), + (True, False, False, False, 1), + (True, True, True, False, 0), + (True, False, True, False, 2), + (True, True, True, True, 0), + 
(True, False, True, True, 2), + ], +) +def test_customer_assignee_list( + auth_client, is_employed, is_external, is_customer_assignee, is_customer, expected +): + customer_assignee = CustomerAssigneeFactory.create() + user = auth_client.user + setup_customer_and_employment_status( + user=user, + is_assignee=is_customer_assignee, + is_customer=is_customer, + is_employed=is_employed, + is_external=is_external, + ) + url = reverse("customer-assignee-list") + + res = auth_client.get(url) + assert res.status_code == HTTP_200_OK + json = res.json() + assert len(json["data"]) == expected + if expected: + assert json["data"][0]["id"] == str(customer_assignee.id) + assert json["data"][0]["relationships"]["customer"]["data"]["id"] == str( + customer_assignee.customer.id + ) diff --git a/backend/timed/projects/tests/test_project.py b/backend/timed/projects/tests/test_project.py new file mode 100644 index 000000000..febb48d45 --- /dev/null +++ b/backend/timed/projects/tests/test_project.py @@ -0,0 +1,251 @@ +"""Tests for the projects endpoint.""" +from datetime import timedelta + +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.employment.factories import UserFactory +from timed.projects.factories import ( + CustomerAssigneeFactory, + ProjectAssigneeFactory, + ProjectFactory, + TaskAssigneeFactory, + TaskFactory, +) +from timed.projects.serializers import ProjectSerializer + + +def test_project_list_not_archived(internal_employee_client): + project = ProjectFactory.create(archived=False) + ProjectFactory.create(archived=True) + + url = reverse("project-list") + + response = internal_employee_client.get(url, data={"archived": 0}) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(project.id) + + +def test_project_list_include( + internal_employee_client, django_assert_num_queries, project +): + user = UserFactory.create() + 
ProjectAssigneeFactory.create(user=user, project=project, is_reviewer=True) + + url = reverse("project-list") + + with django_assert_num_queries(5): + response = internal_employee_client.get( + url, + data={"include": ",".join(ProjectSerializer.included_serializers.keys())}, + ) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(project.id) + + +def test_project_detail_no_auth(db, client, project): + url = reverse("project-detail", args=[project.id]) + + res = client.get(url) + assert res.status_code == status.HTTP_401_UNAUTHORIZED + + +def test_project_detail_no_reports(internal_employee_client, project): + url = reverse("project-detail", args=[project.id]) + + res = internal_employee_client.get(url) + + assert res.status_code == status.HTTP_200_OK + json = res.json() + + assert json["meta"]["spent-time"] == "00:00:00" + assert json["meta"]["spent-billable"] == "00:00:00" + + +def test_project_detail_with_reports( + internal_employee_client, project, task, report_factory +): + rep1, rep2, rep3, *_ = report_factory.create_batch( + 10, task=task, duration=timedelta(hours=1) + ) + rep1.not_billable = True + rep1.save() + rep2.review = True + rep2.save() + rep3.not_billable = True + rep3.review = True + rep3.save() + + url = reverse("project-detail", args=[project.id]) + + res = internal_employee_client.get(url) + + assert res.status_code == status.HTTP_200_OK + json = res.json() + + assert json["meta"]["spent-time"] == "10:00:00" + assert json["meta"]["spent-billable"] == "07:00:00" + + +def test_project_create(auth_client): + url = reverse("project-list") + + response = auth_client.post(url) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def test_project_update(auth_client, project): + url = reverse("project-detail", args=[project.id]) + + response = auth_client.patch(url) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def 
test_project_delete(auth_client, project): + url = reverse("project-detail", args=[project.id]) + + response = auth_client.delete(url) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +@pytest.mark.parametrize("is_assigned, expected", [(True, 1), (False, 0)]) +def test_project_list_external_employee( + external_employee_client, is_assigned, expected +): + ProjectFactory.create_batch(4) + project = ProjectFactory.create() + if is_assigned: + project.assignees.add(external_employee_client.user) + + url = reverse("project-list") + + response = external_employee_client.get(url) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == expected + + +def test_project_filter(internal_employee_client): + user = internal_employee_client.user + proj1, proj2, *_ = ProjectFactory.create_batch(4) + ProjectAssigneeFactory.create(project=proj1, user=user, is_reviewer=True) + ProjectAssigneeFactory.create(project=proj1, user=user, is_manager=True) + + url = reverse("project-list") + + response = internal_employee_client.get(url, data={"has_manager": user.id}) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 1 + + response = internal_employee_client.get(url, data={"has_reviewer": user.id}) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 1 + + +def test_project_multi_number_value_filter(internal_employee_client): + proj1, proj2, *_ = ProjectFactory.create_batch(4) + + url = reverse("project-list") + + response = internal_employee_client.get( + url, {"customer": (",").join([str(proj1.customer.id), str(proj2.customer.id)])} + ) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 2 + + +def test_project_update_billed_flag(internal_employee_client, report_factory): + report = report_factory.create() + project = report.task.project + assert not 
report.billed + + project.billed = True + project.save() + + report.refresh_from_db() + assert report.billed + + project.billed = False + project.save() + + report.refresh_from_db() + assert not report.billed + + +@pytest.mark.parametrize( + "is_customer, project__customer_visible, expected", + [ + (True, True, 1), + (True, False, 0), + (False, True, 0), + (False, False, 0), + ], +) +def test_project_list_no_employment(auth_client, project, is_customer, expected): + ProjectFactory.create_batch(4) + if is_customer: + CustomerAssigneeFactory.create( + user=auth_client.user, is_customer=True, customer=project.customer + ) + + url = reverse("project-list") + + response = auth_client.get(url) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == expected + + +@pytest.mark.parametrize( + "assignee_level, status_code", + [ + ("customer", status.HTTP_200_OK), + ("project", status.HTTP_200_OK), + ("task", status.HTTP_400_BAD_REQUEST), + (None, status.HTTP_403_FORBIDDEN), + ], +) +def test_project_activate_remaining_effort( + internal_employee_client, assignee_level, status_code +): + task = TaskFactory.create() + user = internal_employee_client.user + + if assignee_level == "customer": + CustomerAssigneeFactory( + user=user, customer=task.project.customer, is_manager=True + ) + elif assignee_level == "project": + ProjectAssigneeFactory(user=user, project=task.project, is_manager=True) + elif assignee_level == "task": + TaskAssigneeFactory(user=user, task=task, is_manager=True) + + data = { + "data": { + "type": "projects", + "id": task.project.id, + "attributes": { + "remaining_effort_tracking": True, + }, + } + } + + url = reverse("project-detail", args=[task.project.id]) + + response = internal_employee_client.patch(url, data) + assert response.status_code == status_code diff --git a/backend/timed/projects/tests/test_project_assignee.py b/backend/timed/projects/tests/test_project_assignee.py new file mode 100644 index 
000000000..6fc5eec22 --- /dev/null +++ b/backend/timed/projects/tests/test_project_assignee.py @@ -0,0 +1,44 @@ +import pytest +from django.urls import reverse +from rest_framework.status import HTTP_200_OK + +from timed.conftest import setup_customer_and_employment_status +from timed.projects.factories import ProjectAssigneeFactory + + +@pytest.mark.parametrize( + "is_employed, is_external, is_customer_assignee, is_customer, expected", + [ + (False, False, True, False, 0), + (False, False, True, True, 0), + (True, True, False, False, 0), + (True, False, False, False, 1), + (True, True, True, False, 0), + (True, False, True, False, 1), + (True, True, True, True, 0), + (True, False, True, True, 1), + ], +) +def test_project_assignee_list( + auth_client, is_employed, is_external, is_customer_assignee, is_customer, expected +): + user = auth_client.user + setup_customer_and_employment_status( + user=user, + is_assignee=is_customer_assignee, + is_customer=is_customer, + is_employed=is_employed, + is_external=is_external, + ) + project_assignee = ProjectAssigneeFactory.create() + url = reverse("project-assignee-list") + + res = auth_client.get(url) + assert res.status_code == HTTP_200_OK + json = res.json() + assert len(json["data"]) == expected + if expected: + assert json["data"][0]["id"] == str(project_assignee.id) + assert json["data"][0]["relationships"]["project"]["data"]["id"] == str( + project_assignee.project.id + ) diff --git a/backend/timed/projects/tests/test_task.py b/backend/timed/projects/tests/test_task.py new file mode 100644 index 000000000..8fa560496 --- /dev/null +++ b/backend/timed/projects/tests/test_task.py @@ -0,0 +1,245 @@ +"""Tests for the tasks endpoint.""" +from datetime import date, timedelta + +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.employment.factories import EmploymentFactory +from timed.projects.factories import ( + CustomerAssigneeFactory, + ProjectFactory, + TaskFactory, +) + + 
+def test_task_list_not_archived(internal_employee_client, task_factory): + task = task_factory(archived=False) + task_factory(archived=True) + url = reverse("task-list") + + response = internal_employee_client.get(url, data={"archived": 0}) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(task.id) + + +def test_task_my_most_frequent(internal_employee_client, task_factory, report_factory): + user = internal_employee_client.user + tasks = task_factory.create_batch(6) + + report_date = date.today() - timedelta(days=20) + old_report_date = date.today() - timedelta(days=90) + + # tasks[0] should appear as most frequently used task + report_factory.create_batch(5, date=report_date, user=user, task=tasks[0]) + # tasks[1] should appear as secondly most frequently used task + report_factory.create_batch(4, date=report_date, user=user, task=tasks[1]) + # tasks[2] should not appear in result, as too far in the past + report_factory.create_batch(4, date=old_report_date, user=user, task=tasks[2]) + # tasks[3] should not appear in result, as project is archived + tasks[3].project.archived = True + tasks[3].project.save() + report_factory.create_batch(4, date=report_date, user=user, task=tasks[3]) + # tasks[4] should not appear in result, as task is archived + tasks[4].archived = True + tasks[4].save() + report_factory.create_batch(4, date=report_date, user=user, task=tasks[4]) + + url = reverse("task-list") + + response = internal_employee_client.get(url, {"my_most_frequent": "10"}) + assert response.status_code == status.HTTP_200_OK + + data = response.json()["data"] + assert len(data) == 2 + assert data[0]["id"] == str(tasks[0].id) + assert data[1]["id"] == str(tasks[1].id) + + +def test_task_detail(internal_employee_client, task): + url = reverse("task-detail", args=[task.id]) + + response = internal_employee_client.get(url) + assert response.status_code == status.HTTP_200_OK 
+ + +@pytest.mark.parametrize( + "project_assignee__is_resource, project_assignee__is_manager, project_assignee__is_reviewer, customer_assignee__is_manager, expected", + [ + (True, False, False, False, status.HTTP_403_FORBIDDEN), + (False, True, False, False, status.HTTP_201_CREATED), + (False, False, True, False, status.HTTP_403_FORBIDDEN), + (False, False, False, True, status.HTTP_201_CREATED), + ], +) +def test_task_create( + internal_employee_client, project, project_assignee, customer_assignee, expected +): + user = internal_employee_client.user + project_assignee.user = user + project_assignee.save() + if customer_assignee.is_manager: + customer_assignee.customer = project.customer + customer_assignee.user = user + customer_assignee.save() + + url = reverse("task-list") + + data = { + "data": { + "attributes": {"name": "test task"}, + "relationships": { + "project": {"data": {"type": "projects", "id": project.pk}} + }, + "type": "tasks", + } + } + response = internal_employee_client.post(url, data=data) + assert response.status_code == expected + + +@pytest.mark.parametrize( + "task_assignee__is_resource, task_assignee__is_manager, task_assignee__is_reviewer, project_assignee__is_reviewer, project_assignee__is_manager, different_project, expected", + [ + (True, False, False, False, False, False, status.HTTP_403_FORBIDDEN), + (False, True, False, False, False, False, status.HTTP_200_OK), + (False, False, True, False, False, False, status.HTTP_403_FORBIDDEN), + (False, False, False, True, False, False, status.HTTP_403_FORBIDDEN), + (False, False, False, False, True, False, status.HTTP_200_OK), + (False, False, False, False, True, True, status.HTTP_403_FORBIDDEN), + (False, False, False, False, False, False, status.HTTP_403_FORBIDDEN), + ], +) +def test_task_update( + auth_client, task, task_assignee, project_assignee, different_project, expected +): + user = auth_client.user + EmploymentFactory.create(user=user) + task_assignee.task = task + task_assignee.user 
= user + task_assignee.save() + if different_project: + project = ProjectFactory.create() + project_assignee.project = project + project_assignee.user = user + project_assignee.save() + + data = { + "data": { + "type": "tasks", + "id": task.id, + "attributes": {"name": "Test Task"}, + } + } + + url = reverse("task-detail", args=[task.id]) + + response = auth_client.patch(url, data) + assert response.status_code == expected + + +@pytest.mark.parametrize( + "project_assignee__is_resource, project_assignee__is_manager, project_assignee__is_reviewer, expected", + [ + (True, False, False, status.HTTP_403_FORBIDDEN), + (False, True, False, status.HTTP_204_NO_CONTENT), + (False, False, True, status.HTTP_403_FORBIDDEN), + ], +) +def test_task_delete(auth_client, task, project_assignee, expected): + user = auth_client.user + project_assignee.project = task.project + project_assignee.user = user + project_assignee.save() + EmploymentFactory.create(user=user) + + url = reverse("task-detail", args=[task.id]) + + response = auth_client.delete(url) + assert response.status_code == expected + + +def test_task_detail_no_reports(internal_employee_client, task): + url = reverse("task-detail", args=[task.id]) + + res = internal_employee_client.get(url) + + assert res.status_code == status.HTTP_200_OK + + json = res.json() + assert json["meta"]["spent-time"] == "00:00:00" + + +def test_task_detail_with_reports(internal_employee_client, task, report_factory): + report_factory.create_batch(5, task=task, duration=timedelta(minutes=30)) + + url = reverse("task-detail", args=[task.id]) + + res = internal_employee_client.get(url) + + assert res.status_code == status.HTTP_200_OK + + json = res.json() + assert json["meta"]["spent-time"] == "02:30:00" + + +@pytest.mark.parametrize("is_assigned, expected", [(True, 1), (False, 0)]) +def test_task_list_external_employee(external_employee_client, is_assigned, expected): + TaskFactory.create_batch(4) + task = TaskFactory.create() + if is_assigned: 
+ task.assignees.add(external_employee_client.user) + + url = reverse("task-list") + + response = external_employee_client.get(url) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == expected + + +@pytest.mark.parametrize( + "is_customer, customer_visible, expected", + [ + (True, True, 1), + (True, False, 0), + (False, False, 0), + (False, True, 0), + ], +) +def test_task_list_no_employment(auth_client, is_customer, customer_visible, expected): + TaskFactory.create_batch(4) + task = TaskFactory.create() + if is_customer: + CustomerAssigneeFactory.create( + user=auth_client.user, is_customer=True, customer=task.project.customer + ) + if customer_visible: + task.project.customer_visible = True + task.project.save() + + url = reverse("task-list") + + response = auth_client.get(url) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == expected + + +def test_task_multi_number_value_filter(internal_employee_client): + task1, task2, *_ = TaskFactory.create_batch(4) + + url = reverse("task-list") + + response = internal_employee_client.get( + url, {"project": (",").join([str(task1.project.id), str(task2.project.id)])} + ) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 2 diff --git a/backend/timed/projects/tests/test_task_assignee.py b/backend/timed/projects/tests/test_task_assignee.py new file mode 100644 index 000000000..522350e2a --- /dev/null +++ b/backend/timed/projects/tests/test_task_assignee.py @@ -0,0 +1,44 @@ +import pytest +from django.urls import reverse +from rest_framework.status import HTTP_200_OK + +from timed.conftest import setup_customer_and_employment_status +from timed.projects.factories import TaskAssigneeFactory + + +@pytest.mark.parametrize( + "is_employed, is_external, is_customer_assignee, is_customer, expected", + [ + (False, False, True, False, 0), + (False, False, True, 
True, 0), + (True, True, False, False, 0), + (True, False, False, False, 1), + (True, True, True, False, 0), + (True, False, True, False, 1), + (True, True, True, True, 0), + (True, False, True, True, 1), + ], +) +def test_task_assignee_list( + auth_client, is_employed, is_external, is_customer_assignee, is_customer, expected +): + user = auth_client.user + setup_customer_and_employment_status( + user=user, + is_assignee=is_customer_assignee, + is_customer=is_customer, + is_employed=is_employed, + is_external=is_external, + ) + task_assignee = TaskAssigneeFactory.create() + url = reverse("task-assignee-list") + + res = auth_client.get(url) + assert res.status_code == HTTP_200_OK + json = res.json() + assert len(json["data"]) == expected + if expected: + assert json["data"][0]["id"] == str(task_assignee.id) + assert json["data"][0]["relationships"]["task"]["data"]["id"] == str( + task_assignee.task.id + ) diff --git a/backend/timed/projects/urls.py b/backend/timed/projects/urls.py new file mode 100644 index 000000000..534ba1dbf --- /dev/null +++ b/backend/timed/projects/urls.py @@ -0,0 +1,19 @@ +"""URL to view mapping for the projects app.""" + +from django.conf import settings +from rest_framework.routers import SimpleRouter + +from timed.projects import views + +r = SimpleRouter(trailing_slash=settings.APPEND_SLASH) + +r.register(r"projects", views.ProjectViewSet, "project") +r.register(r"customers", views.CustomerViewSet, "customer") +r.register(r"tasks", views.TaskViewSet, "task") +r.register(r"billing-types", views.BillingTypeViewSet, "billing-type") +r.register(r"cost-centers", views.CostCenterViewSet, "cost-center") +r.register(r"task-assignees", views.TaskAsssigneeViewSet, "task-assignee") +r.register(r"project-assignees", views.ProjectAsssigneeViewSet, "project-assignee") +r.register(r"customer-assignees", views.CustomerAsssigneeViewSet, "customer-assignee") + +urlpatterns = r.urls diff --git a/backend/timed/projects/views.py 
b/backend/timed/projects/views.py new file mode 100644 index 000000000..fb849776f --- /dev/null +++ b/backend/timed/projects/views.py @@ -0,0 +1,253 @@ +"""Viewsets for the projects app.""" + +from django.db.models import Q +from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet + +from timed.permissions import ( + IsAccountant, + IsAuthenticated, + IsCustomer, + IsInternal, + IsManager, + IsReadOnly, + IsSuperUser, + IsUpdateOnly, +) +from timed.projects import filters, models, serializers + + +class CustomerViewSet(ReadOnlyModelViewSet): + """Customer view set.""" + + serializer_class = serializers.CustomerSerializer + filterset_class = filters.CustomerFilterSet + ordering = "name" + + def get_queryset(self): + """Prefetch related data. + + If an employee is external, get only assigned customers. + + :return: The customers + :rtype: QuerySet + """ + user = self.request.user + queryset = models.Customer.objects.prefetch_related("projects") + current_employment = user.get_active_employment() + + if current_employment is None: + if models.CustomerAssignee.objects.filter( + user=user, is_customer=True + ).exists(): + return queryset.filter(assignees=user) + elif not current_employment.is_external: # pragma: no cover + return queryset + elif current_employment.is_external: + return queryset.filter( + Q(assignees=user) + | Q(projects__assignees=user) + | Q(projects__tasks__assignees=user) + ) + return queryset.none() + + +class BillingTypeViewSet(ReadOnlyModelViewSet): + serializer_class = serializers.BillingTypeSerializer + ordering = "name" + permission_classes = [ + # superuser may edit all billing types + IsSuperUser + # internal employees may read all billing types + | IsAuthenticated & (IsInternal | IsCustomer) & IsReadOnly + ] + + def get_queryset(self): + """Get billing types depending on the user's role. + + Internal employees should see all billing types. + Customers should only see billing types that are used in customer visible projects. 
+ """ + user = self.request.user + queryset = models.BillingType.objects.all() + + current_employment = user.get_active_employment() + + if current_employment: + if ( + current_employment.is_external + and models.CustomerAssignee.objects.filter( + user=user, is_customer=True + ).exists() + ): + return queryset.filter( + projects__customer_visible=True, + projects__customer__customer_assignees__user=user, + projects__customer__customer_assignees__is_customer=True, + ) + return queryset + else: + if models.CustomerAssignee.objects.filter( + user=user, is_customer=True + ).exists(): + return queryset.filter( + projects__customer_visible=True, + projects__customer__customer_assignees__user=user, + projects__customer__customer_assignees__is_customer=True, + ) + + +class CostCenterViewSet(ReadOnlyModelViewSet): + serializer_class = serializers.CostCenterSerializer + ordering = "name" + permission_classes = [ + # superuser may edit all cost centers + IsSuperUser + # internal employees may read all cost centers + | IsAuthenticated & IsInternal & IsReadOnly + ] + + def get_queryset(self): + return models.CostCenter.objects.all() + + +class ProjectViewSet(ModelViewSet): + """Project view set.""" + + serializer_class = serializers.ProjectSerializer + filterset_class = filters.ProjectFilterSet + ordering_fields = ("customer__name", "name") + ordering = "name" + queryset = models.Project.objects.all() + permission_classes = [ + # superuser may edit all projects + IsSuperUser + # accountants may edit all projects + | IsAccountant + # managers may edit only assigned projects + | IsManager & IsUpdateOnly + # all authenticated users may read all tasks + | IsAuthenticated & IsReadOnly + ] + + def get_queryset(self): + """Get only assigned projects, if an employee is external.""" + user = self.request.user + queryset = ( + super() + .get_queryset() + .select_related("customer", "billing_type", "cost_center") + ) + current_employment = user.get_active_employment() + + if 
current_employment is None: + if models.CustomerAssignee.objects.filter( + user=user, is_customer=True + ).exists(): + return queryset.filter(customer__assignees=user, customer_visible=True) + elif not current_employment.is_external: # pragma: no cover + return queryset + elif current_employment.is_external: + return queryset.filter( + Q(assignees=user) + | Q(tasks__assignees=user) + | Q(customer__assignees=user) + ) + return queryset.none() + + +class TaskViewSet(ModelViewSet): + """Task view set.""" + + serializer_class = serializers.TaskSerializer + filterset_class = filters.TaskFilterSet + queryset = models.Task.objects.select_related("project", "cost_center") + ordering = "name" + permission_classes = [ + # superuser may edit all tasks + IsSuperUser + # managers may edit all tasks + | IsManager + # all authenticated users may read all tasks + | IsAuthenticated & IsReadOnly + ] + + def filter_queryset(self, queryset): + """Specific filter queryset options.""" + # my most frequent filter uses LIMIT so default ordering + # needs to be disabled to avoid exception + # see TODO filters.MyMostFrequentTaskFilter to avoid this + if "my_most_frequent" in self.request.query_params: + self.ordering = None + + return super().filter_queryset(queryset) + + def get_queryset(self): + """Get only assigned tasks, if an employee is external.""" + user = self.request.user + queryset = super().get_queryset().select_related("project", "cost_center") + current_employment = user.get_active_employment() + + if current_employment is None: + if models.CustomerAssignee.objects.filter( + user=user, is_customer=True + ).exists(): + return queryset.filter( + project__customer__assignees=user, project__customer_visible=True + ) + elif not current_employment.is_external: + return queryset + elif current_employment.is_external: + return queryset.filter( + Q(assignees=user) + | Q(project__assignees=user) + | Q(project__customer__assignees=user) + ) + return queryset.none() + + +class 
TaskAsssigneeViewSet(ReadOnlyModelViewSet): + serializer_class = serializers.TaskAssigneeSerializer + filterset_class = filters.TaskAssigneeFilterSet + + def get_queryset(self): + """Don't show task assignees to customers.""" + user = self.request.user + + queryset = models.TaskAssignee.objects.select_related("task", "user") + + current_employment = user.get_active_employment() + if current_employment is None or current_employment.is_external: + return queryset.none() + return queryset + + +class ProjectAsssigneeViewSet(ReadOnlyModelViewSet): + serializer_class = serializers.ProjectAssigneeSerializer + filterset_class = filters.ProjectAssigneeFilterSet + + def get_queryset(self): + """Don't show project assignees to customers.""" + user = self.request.user + + queryset = models.ProjectAssignee.objects.select_related("project", "user") + + current_employment = user.get_active_employment() + if current_employment is None or current_employment.is_external: + return queryset.none() + return queryset + + +class CustomerAsssigneeViewSet(ReadOnlyModelViewSet): + serializer_class = serializers.CustomerAssigneeSerializer + filterset_class = filters.CustomerAssigneeFilterSet + + def get_queryset(self): + """Don't show customer assignees to customers.""" + user = self.request.user + + queryset = models.CustomerAssignee.objects.select_related("customer", "user") + + current_employment = user.get_active_employment() + if current_employment is None or current_employment.is_external: + return queryset.none() + return queryset diff --git a/backend/timed/redmine/__init__.py b/backend/timed/redmine/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/timed/redmine/admin.py b/backend/timed/redmine/admin.py new file mode 100644 index 000000000..f3e123884 --- /dev/null +++ b/backend/timed/redmine/admin.py @@ -0,0 +1,7 @@ +from nested_inline.admin import NestedStackedInline + +from timed.redmine.models import RedmineProject + + +class 
RedmineProjectInline(NestedStackedInline): + model = RedmineProject diff --git a/backend/timed/redmine/management/commands/import_project_data.py b/backend/timed/redmine/management/commands/import_project_data.py new file mode 100644 index 000000000..9da4b245a --- /dev/null +++ b/backend/timed/redmine/management/commands/import_project_data.py @@ -0,0 +1,72 @@ +import redminelib +from django.conf import settings +from django.core.management.base import BaseCommand + +from timed.projects.models import Project + + +class Command(BaseCommand): # pragma: no cover + help = "Update projects" + + def add_arguments(self, parser): + parser.add_argument( + "--pretend", + action="store_true", + help="Pretend mode for testing", + ) + + def handle(self, *args, **options): + redmine = redminelib.Redmine( + settings.REDMINE_URL, + key=settings.REDMINE_APIKEY, + ) + + open_redmine_projects = list( + redmine.issue.filter( + tracker_id=6, status="open", project_id=settings.REDMINE_BUILD_PROJECT + ) + ) + closed_redmine_projects = list( + redmine.issue.filter( + tracker_id=6, + status_id=5, + closed_on="y", + project_id=settings.REDMINE_BUILD_PROJECT, + ) + ) + + redmine_projects = open_redmine_projects + closed_redmine_projects + + pretend = options["pretend"] + + for redmine_project in redmine_projects: + timed_project = Project.objects.filter( + redmine_project__issue_id=redmine_project.id, + ).first() + + if not timed_project: + continue + + custom_fields = list(redmine_project.custom_fields.values()) + + amount_offered = next( + item for item in custom_fields if item["name"] == "Offeriert" + ) + + amount_invoiced = next( + item for item in custom_fields if item["name"] == "Verrechnet" + ) + + timed_project.amount_offered = ( + amount_offered.get("value") or timed_project.amount_offered + ) + timed_project.amount_invoiced = ( + amount_invoiced.get("value") or timed_project.amount_invoiced + ) + if not pretend: + timed_project.save() + self.stdout.write( + self.style.SUCCESS( + 
f"Updating project {timed_project.name} #{redmine_project.id} with amount offered {timed_project.amount_offered} and amount invoiced {timed_project.amount_invoiced}" + ) + ) diff --git a/backend/timed/redmine/management/commands/redmine_report.py b/backend/timed/redmine/management/commands/redmine_report.py new file mode 100644 index 000000000..37451e4d6 --- /dev/null +++ b/backend/timed/redmine/management/commands/redmine_report.py @@ -0,0 +1,92 @@ +import sys +from datetime import timedelta + +import redminelib +from django.conf import settings +from django.core.management.base import BaseCommand +from django.db.models import Count, Sum +from django.template.loader import get_template +from django.utils import timezone + +from timed.projects.models import Project +from timed.tracking.models import Report + +template = get_template("redmine/weekly_report.txt", using="text") + + +class Command(BaseCommand): + help = "Update associated Redmine projects and send reports to watchers." + + def add_arguments(self, parser): + parser.add_argument( + "--last-days", + dest="last_days", + default=7, + help="Build report of number of last days", + type=int, + ) + + def handle(self, *args, **options): + redmine = redminelib.Redmine( + settings.REDMINE_URL, + key=settings.REDMINE_APIKEY, + ) + + last_days = options["last_days"] + # today is excluded + end = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0) + start = end - timedelta(days=last_days) + + # get projects with reports in given last days + affected_projects = ( + Project.objects.filter( + archived=False, + redmine_project__isnull=False, + tasks__reports__updated__range=[start, end], + ) + .annotate(count_reports=Count("tasks__reports")) + .filter(count_reports__gt=0) + .values("id") + ) + # calculate total hours + projects = ( + Project.objects.filter(id__in=affected_projects) + .order_by("name") + .annotate(total_hours=Sum("tasks__reports__duration")) + ) + + for project in projects: + estimated_hours 
= ( + project.estimated_time.total_seconds() / 3600 + if project.estimated_time + else 0.0 + ) + total_hours = project.total_hours.total_seconds() / 3600 + try: + issue = redmine.issue.get(project.redmine_project.issue_id) + reports = Report.objects.filter( + task__project=project, updated__range=[start, end] + ).order_by("date") + hours = reports.aggregate(hours=Sum("duration"))["hours"] + + issue.notes = template.render( + { + "project": project, + "hours": hours.total_seconds() / 3600, + "last_days": last_days, + "total_hours": total_hours, + "estimated_hours": estimated_hours, + "reports": reports, + } + ) + issue.custom_fields = [ + {"id": settings.REDMINE_SPENTHOURS_FIELD, "value": total_hours} + ] + issue.save() + except redminelib.exceptions.BaseRedmineError: + sys.stderr.write( + "Project {0} has an invalid Redmine " + "issue {1} assigned. Skipping".format( + project.name, project.redmine_project.issue_id + ) + ) diff --git a/backend/timed/redmine/management/commands/update_project_expenditure.py b/backend/timed/redmine/management/commands/update_project_expenditure.py new file mode 100644 index 000000000..11d2d95e2 --- /dev/null +++ b/backend/timed/redmine/management/commands/update_project_expenditure.py @@ -0,0 +1,91 @@ +import redminelib +from django.conf import settings +from django.core.management.base import BaseCommand +from django.db.models import Case, Count, Sum, When + +from timed.projects.models import Project + + +class Command(BaseCommand): + help = "Update expenditures on associated Redmine projects." 
+ + def add_arguments(self, parser): + parser.add_argument( + "--pretend", + action="store_true", + help="Pretend mode for testing", + ) + + def handle(self, *args, **options): + redmine = redminelib.Redmine( + settings.REDMINE_URL, + key=settings.REDMINE_APIKEY, + ) + + affected_projects = ( + Project.objects.filter( + archived=False, + redmine_project__isnull=False, + ) + .annotate(count_reports=Count("tasks__reports")) + .annotate( + total_hours=Case( + When(count_reports__gt=0, then=Sum("tasks__reports__duration")), + default=None, + ) + ) + ) + + pretend = options["pretend"] + + for project in affected_projects.iterator(): + estimated_hours = ( + project.estimated_time.total_seconds() / 3600 + if project.estimated_time + else 0.0 + ) + try: + issue = redmine.issue.get(project.redmine_project.issue_id) + except redminelib.exceptions.BaseRedmineError as e: + self.stdout.write( + self.style.ERROR( + f"Failed retrieving Project {project.name} with Redmine issue {project.redmine_project.issue_id} assigned. 
Skipping.\n{e}" + ) + ) + continue + issue.estimated_hours = estimated_hours + + amount_offered = ( + project.amount_offered and float(project.amount_offered.amount) + ) or 0.0 + amount_invoiced = ( + project.amount_invoiced and float(project.amount_invoiced.amount) + ) or 0.0 + + # fields not active in Redmine projects settings won't be saved + issue.custom_fields = [ + { + "id": settings.REDMINE_AMOUNT_OFFERED_FIELD, + "value": amount_offered, + }, + { + "id": settings.REDMINE_AMOUNT_INVOICED_FIELD, + "value": amount_invoiced, + }, + ] + if not pretend: + try: + issue.save() + continue + except redminelib.exceptions.BaseRedmineError as e: # pragma: no cover + self.stdout.write( + self.style.ERROR( + f"Failed to save Project {project.name} with Redmine issue {issue.id}!\n{e}" + ) + ) + + self.stdout.write( + self.style.SUCCESS( + f"Updating Redmine issue {project.redmine_project.issue_id} with estimated time {estimated_hours}, amount offered {amount_offered}, amount invoiced {amount_invoiced}" + ) + ) diff --git a/backend/timed/redmine/migrations/0001_initial.py b/backend/timed/redmine/migrations/0001_initial.py new file mode 100644 index 000000000..b0f673a3a --- /dev/null +++ b/backend/timed/redmine/migrations/0001_initial.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.3 on 2017-07-31 10:53 +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + initial = True + + dependencies = [("projects", "0001_initial")] + + operations = [ + migrations.CreateModel( + name="RedmineProject", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("issue_id", models.PositiveIntegerField()), + ( + "project", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + related_name="redmine_project", + to="projects.Project", + ), + ), + ], + ) + ] diff 
--git a/backend/timed/redmine/migrations/__init__.py b/backend/timed/redmine/migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/timed/redmine/models.py b/backend/timed/redmine/models.py new file mode 100644 index 000000000..1c84de5bc --- /dev/null +++ b/backend/timed/redmine/models.py @@ -0,0 +1,16 @@ +from django.db import models + +from timed.projects.models import Project + + +class RedmineProject(models.Model): + """ + Definition of a Redmine Project. + + Defines what Timed project belongs to what Redmine issue. + """ + + project = models.OneToOneField( + Project, on_delete=models.CASCADE, related_name="redmine_project" + ) + issue_id = models.PositiveIntegerField() diff --git a/backend/timed/redmine/templates/redmine/weekly_report.txt b/backend/timed/redmine/templates/redmine/weekly_report.txt new file mode 100644 index 000000000..b92ac82cf --- /dev/null +++ b/backend/timed/redmine/templates/redmine/weekly_report.txt @@ -0,0 +1,13 @@ +{% load float_hours %} +``` +Customer: {{project.customer.name}} +Project: {{project.name}} +Hours in last {{last_days}} days: {{hours}} +Total hours: {{total_hours}} +Estimated hours: {{estimated_hours}} + + +Reported in last {{last_days}} days: +{% for report in reports %} +{{report.date}} {{report.duration|float_hours|floatformat:2}} {% if report.not_billable %}{{"[NB]"|ljust:"6"}}{% else %}{{""|ljust:"6"}}{% endif %}{% if report.review %}{{"[Rev]"|ljust:"6"}}{% else %}{{""|ljust:"6"}}{% endif %} {{report.user.get_full_name|ljust:20}} {{report.task.name|ljust:"20"}} {{report.comment|ljust:"100"}}{% endfor %} +``` diff --git a/backend/timed/redmine/templatetags/__init__.py b/backend/timed/redmine/templatetags/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/timed/redmine/templatetags/float_hours.py b/backend/timed/redmine/templatetags/float_hours.py new file mode 100644 index 000000000..c6e22b047 --- /dev/null +++ 
b/backend/timed/redmine/templatetags/float_hours.py @@ -0,0 +1,9 @@ +from django import template + +register = template.Library() + + +@register.filter +def float_hours(duration): + """Convert timedelta to floating hours.""" + return duration.total_seconds() / 3600 diff --git a/backend/timed/redmine/tests/__init__.py b/backend/timed/redmine/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/timed/redmine/tests/test_redmine_report.py b/backend/timed/redmine/tests/test_redmine_report.py new file mode 100644 index 000000000..81b7e78f9 --- /dev/null +++ b/backend/timed/redmine/tests/test_redmine_report.py @@ -0,0 +1,120 @@ +import pytest +from django.core.management import call_command +from redminelib.exceptions import ResourceNotFoundError + +from timed.redmine.models import RedmineProject + + +@pytest.mark.parametrize("not_billable", [False, True]) +@pytest.mark.parametrize("review", [False, True]) +def test_redmine_report(db, freezer, mocker, report_factory, not_billable, review): + """ + Test redmine report. + + Simulate reports added on Friday 2017-07-28 and cronjob run on + Monday 2017-07-31. 
+ """ + redmine_instance = mocker.MagicMock() + issue = mocker.MagicMock() + redmine_instance.issue.get.return_value = issue + redmine_class = mocker.patch("redminelib.Redmine") + redmine_class.return_value = redmine_instance + + freezer.move_to("2017-07-28") + report = report_factory( + not_billable=not_billable, + review=review, + ) + report_hours = report.duration.total_seconds() / 3600 + estimated_hours = report.task.project.estimated_time.total_seconds() / 3600 + RedmineProject.objects.create(project=report.task.project, issue_id=1000) + # report not attached to redmine + other = report_factory() + + freezer.move_to("2017-07-31") + call_command("redmine_report", last_days=7) + + redmine_instance.issue.get.assert_called_once_with(1000) + assert issue.custom_fields == [{"id": 0, "value": report_hours}] + assert "Total hours: {0}".format(report_hours) in issue.notes + assert "Estimated hours: {0}".format(estimated_hours) in issue.notes + assert "Hours in last 7 days: {0}\n".format(report_hours) in issue.notes + assert report.comment in issue.notes + assert "[NB]" in issue.notes or not not_billable + assert "[Rev]" in issue.notes or not review + + assert other.comment not in issue.notes, "Only one new line after report line" + issue.save.assert_called_once_with() + + +def test_redmine_report_no_estimated_time(db, freezer, mocker, task, report_factory): + redmine_instance = mocker.MagicMock() + issue = mocker.MagicMock() + redmine_instance.issue.get.return_value = issue + redmine_class = mocker.patch("redminelib.Redmine") + redmine_class.return_value = redmine_instance + + freezer.move_to("2017-07-28") + task.project.estimated_time = None + task.project.save() + report = report_factory(task=task) + RedmineProject.objects.create(project=report.task.project, issue_id=1000) + + freezer.move_to("2017-07-31") + call_command("redmine_report", last_days=7) + + redmine_instance.issue.get.assert_called_once_with(1000) + issue.save.assert_called_once_with() + + +def 
test_redmine_report_invalid_issue(db, freezer, mocker, capsys, report_factory): + """Test case when issue is not available.""" + redmine_instance = mocker.MagicMock() + redmine_class = mocker.patch("redminelib.Redmine") + redmine_class.return_value = redmine_instance + redmine_instance.issue.get.side_effect = ResourceNotFoundError() + + freezer.move_to("2017-07-28") + report = report_factory() + RedmineProject.objects.create(project=report.task.project, issue_id=1000) + + freezer.move_to("2017-07-31") + call_command("redmine_report", last_days=7) + + _, err = capsys.readouterr() + assert "issue 1000 assigned" in err + + +def test_redmine_report_calculate_total_hours( + db, freezer, mocker, task, report_factory +): + redmine_instance = mocker.MagicMock() + issue = mocker.MagicMock() + redmine_instance.issue.get.return_value = issue + redmine_class = mocker.patch("redminelib.Redmine") + redmine_class.return_value = redmine_instance + + freezer.move_to("2017-07-15") + reports = report_factory.create_batch(10, task=task) + + freezer.move_to("2017-07-24") + reports_last_seven_days = report_factory.create_batch(10, task=task) + + total_hours_last_seven_days = 0 + for report in reports_last_seven_days: + total_hours_last_seven_days += report.duration.total_seconds() / 3600 + + total_hours = 0 + for report in reports + reports_last_seven_days: + total_hours += report.duration.total_seconds() / 3600 + + RedmineProject.objects.create(project=task.project, issue_id=1000) + + freezer.move_to("2017-07-31") + call_command("redmine_report", last_days=7) + + redmine_instance.issue.get.assert_called_once_with(1000) + assert "Total hours: {0}".format(total_hours) in issue.notes + assert ( + "Hours in last 7 days: {0}\n".format(total_hours_last_seven_days) in issue.notes + ) diff --git a/backend/timed/redmine/tests/test_update_project_expenditure.py b/backend/timed/redmine/tests/test_update_project_expenditure.py new file mode 100644 index 000000000..23a166d78 --- /dev/null +++ 
b/backend/timed/redmine/tests/test_update_project_expenditure.py @@ -0,0 +1,60 @@ +import datetime + +import pytest +from django.core.management import call_command +from redminelib.exceptions import ResourceNotFoundError + +from timed.redmine.models import RedmineProject + + +@pytest.mark.parametrize("pretend", [True, False]) +@pytest.mark.parametrize("amount_offered", [None, 100.00, 0]) +def test_update_project_expenditure( + db, mocker, capsys, report_factory, pretend, amount_offered +): + redmine_instance = mocker.MagicMock() + issue = mocker.MagicMock() + redmine_instance.issue.get.return_value = issue + redmine_class = mocker.patch("redminelib.Redmine") + redmine_class.return_value = redmine_instance + + report = report_factory() + project = report.task.project + project.estimated_time = datetime.timedelta(hours=10) + project.amount_offered = amount_offered + project.save() + + RedmineProject.objects.create(project=report.task.project, issue_id=1000) + + call_command("update_project_expenditure", pretend=pretend) + + offered = (project.amount_offered and project.amount_offered.amount) or 0.0 + + if not pretend: + redmine_instance.issue.get.assert_called_once_with(1000) + assert issue.estimated_hours == project.estimated_time.total_seconds() / 3600 + assert issue.custom_fields[0]["value"] == offered + assert issue.custom_fields[1]["value"] == project.amount_invoiced.amount + issue.save.assert_called_once_with() + else: + out, _ = capsys.readouterr() + assert "Redmine issue 1000" in out + assert f"amount offered {offered}" in out + assert f"amount invoiced {project.amount_invoiced.amount}" in out + + +def test_update_project_expenditure_invalid_issue( + db, freezer, mocker, capsys, report_factory +): + redmine_instance = mocker.MagicMock() + redmine_class = mocker.patch("redminelib.Redmine") + redmine_class.return_value = redmine_instance + redmine_instance.issue.get.side_effect = ResourceNotFoundError() + + report = 
report_factory(duration=datetime.timedelta(hours=4)) + RedmineProject.objects.create(project=report.task.project, issue_id=1000) + + call_command("update_project_expenditure") + + out, _ = capsys.readouterr() + assert "issue 1000 assigned. Skipping." in out diff --git a/backend/timed/reports/__init__.py b/backend/timed/reports/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/timed/reports/filters.py b/backend/timed/reports/filters.py new file mode 100644 index 000000000..113e26b2a --- /dev/null +++ b/backend/timed/reports/filters.py @@ -0,0 +1,166 @@ +from django.db.models import DurationField, F, Q, Sum, Value +from django.db.models.functions import Coalesce +from django_filters.rest_framework import ( + BaseInFilter, + DateFilter, + FilterSet, + NumberFilter, +) + +from timed.projects.models import CustomerAssignee, ProjectAssignee, TaskAssignee + + +class StatisticFiltersetBase: + def filter_has_reviewer(self, queryset, name, value): + if not value: # pragma: no cover + return queryset + + task_prefix = self._refs["task_prefix"] + project_prefix = self._refs["project_prefix"] + customer_prefix = self._refs["customer_prefix"] + + customer_assignees = CustomerAssignee.objects.filter( + is_reviewer=True, user_id=value + ).values("customer_id") + + project_assignees = ProjectAssignee.objects.filter( + is_reviewer=True, user_id=value + ).values("project_id") + task_assignees = TaskAssignee.objects.filter( + is_reviewer=True, user_id=value + ).values("task_id") + + the_filter = ( + Q(**{f"{customer_prefix}pk__in": customer_assignees}) + | Q(**{f"{project_prefix}pk__in": project_assignees}) + | Q(**{f"{task_prefix}id__in": task_assignees}) + ) + return queryset.filter_aggregate(the_filter).filter_base(the_filter) + + def filter_cost_center(self, queryset, name, value): + """ + Filter report by cost center. 
+ + The filter behaves slightly different depending on what the + statistic summarizes: + * When viewing the statistic over customers, the work durations are + filtered (either project or task) + * When viewing the statistic over project, only the projects + are filtered + * When viewing the statistic over tasks, only the tasks + are filtered + """ + + # TODO Discuss: Is this the desired behaviour by our users? + + if not value: # pragma: no cover + return queryset + + is_customer = not self._refs["customer_prefix"] + + task_prefix = self._refs["task_prefix"] + project_prefix = self._refs["project_prefix"] + + filter_q = Q(**{f"{task_prefix}cost_center": value}) | Q( + **{ + f"{project_prefix}cost_center": value, + f"{task_prefix}cost_center__isnull": True, + } + ) + + if is_customer: + # Customer mode: We only need to filter aggregates, + # as the customer has no cost center + return queryset.filter_aggregate(filter_q) + else: + # Project or task: Filter both to get the correct result + return queryset.filter_base(filter_q).filter_aggregate(filter_q) + + def filter_queryset(self, queryset): + qs = super().filter_queryset(queryset) + + duration_ref = self._refs["reports_ref"] + "__duration" + + full_qs = qs._base.annotate( + duration=Coalesce( + Sum(duration_ref, filter=qs._agg_filters), + Value("00:00:00", DurationField(null=False)), + ), + pk=F("id"), + ) + result = full_qs.values() + # Useful for QS debugging + # print(result.query) + return result + + +def StatisticFiltersetBuilder(name, reports_ref, project_ref, customer_ref, task_ref): + reports_prefix = f"{reports_ref}__" if reports_ref else "" + project_prefix = f"{project_ref}__" if project_ref else "" + customer_prefix = f"{customer_ref}__" if customer_ref else "" + task_prefix = f"{task_ref}__" if task_ref else "" + + return type( + name, + (StatisticFiltersetBase, FilterSet), + { + "_refs": { + "reports_prefix": reports_prefix, + "project_prefix": project_prefix, + "customer_prefix": customer_prefix, + 
"task_prefix": task_prefix, + "reports_ref": reports_ref, + "project_ref": project_ref, + "customer_ref": customer_ref, + "task_ref": task_ref, + }, + "from_date": DateFilter( + field_name=f"{reports_prefix}date", lookup_expr="gte" + ), + "to_date": DateFilter( + field_name=f"{reports_prefix}date", lookup_expr="lte" + ), + "project": NumberFilter(field_name=f"{project_prefix}pk"), + "customer": NumberFilter(field_name=f"{customer_prefix}pk"), + "review": NumberFilter(field_name=f"{reports_prefix}review"), + "not_billable": NumberFilter(field_name=f"{reports_prefix}not_billable"), + "billed": NumberFilter(field_name=f"{reports_prefix}billed"), + "verified": NumberFilter( + field_name=f"{reports_prefix}verified_by_id", + lookup_expr="isnull", + exclude=True, + ), + "verifier": NumberFilter(field_name=f"{reports_prefix}verified_by"), + "billing_type": NumberFilter(field_name=f"{project_prefix}billing_type"), + "user": NumberFilter(field_name=f"{reports_prefix}user_id"), + "rejected": NumberFilter(field_name=f"{reports_prefix}rejected"), + "id": BaseInFilter(), + "cost_center": NumberFilter(method="filter_cost_center"), + "reviewer": NumberFilter(method="filter_has_reviewer"), + }, + ) + + +CustomerStatisticFilterSet = StatisticFiltersetBuilder( + "CustomerStatisticFilterSet", + reports_ref="projects__tasks__reports", + project_ref="projects", + task_ref="projects__tasks", + customer_ref="", +) + +ProjectStatisticFilterSet = StatisticFiltersetBuilder( + "ProjectStatisticFilterSet", + reports_ref="tasks__reports", + project_ref="", + task_ref="tasks", + customer_ref="customer", +) + +TaskStatisticFilterSet = StatisticFiltersetBuilder( + "TaskStatisticFilterSet", + reports_ref="reports", + project_ref="project", + task_ref="", + customer_ref="project__customer", +) diff --git a/backend/timed/reports/serializers.py b/backend/timed/reports/serializers.py new file mode 100644 index 000000000..abbf4901a --- /dev/null +++ b/backend/timed/reports/serializers.py @@ -0,0 +1,77 
@@ +from django.contrib.auth import get_user_model +from rest_framework_json_api import relations +from rest_framework_json_api.serializers import ( + CharField, + DecimalField, + DurationField, + IntegerField, + Serializer, +) + +from timed.projects.models import Customer, Project +from timed.serializers import TotalTimeRootMetaMixin + + +class YearStatisticSerializer(TotalTimeRootMetaMixin, Serializer): + duration = DurationField() + year = IntegerField() + + class Meta: + resource_name = "year-statistics" + + +class MonthStatisticSerializer(TotalTimeRootMetaMixin, Serializer): + duration = DurationField() + year = IntegerField() + month = IntegerField() + + class Meta: + resource_name = "month-statistics" + + +class CustomerStatisticSerializer(TotalTimeRootMetaMixin, Serializer): + duration = DurationField() + name = CharField(read_only=True) + + class Meta: + resource_name = "customer-statistics" + + +class ProjectStatisticSerializer(TotalTimeRootMetaMixin, Serializer): + duration = DurationField() + name = CharField() + amount_offered = DecimalField(max_digits=None, decimal_places=2) + amount_offered_currency = CharField() + amount_invoiced = DecimalField(max_digits=None, decimal_places=2) + amount_invoiced_currency = CharField() + customer = relations.ResourceRelatedField(model=Customer, read_only=True) + estimated_time = DurationField(read_only=True) + total_remaining_effort = DurationField(read_only=True) + + included_serializers = {"customer": "timed.projects.serializers.CustomerSerializer"} + + class Meta: + resource_name = "project-statistics" + + +class TaskStatisticSerializer(TotalTimeRootMetaMixin, Serializer): + name = CharField(read_only=True) + most_recent_remaining_effort = DurationField(read_only=True) + duration = DurationField(read_only=True) + project = relations.ResourceRelatedField(model=Project, read_only=True) + estimated_time = DurationField(read_only=True) + + included_serializers = {"project": 
"timed.projects.serializers.ProjectSerializer"} + + class Meta: + resource_name = "task-statistics" + + +class UserStatisticSerializer(TotalTimeRootMetaMixin, Serializer): + duration = DurationField(read_only=True) + user = relations.ResourceRelatedField(model=get_user_model(), read_only=True) + + included_serializers = {"user": "timed.employment.serializers.UserSerializer"} + + class Meta: + resource_name = "user-statistics" diff --git a/backend/timed/reports/templates/workreport.ots b/backend/timed/reports/templates/workreport.ots new file mode 100644 index 000000000..304c21ecf Binary files /dev/null and b/backend/timed/reports/templates/workreport.ots differ diff --git a/backend/timed/reports/tests/__init__.py b/backend/timed/reports/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/timed/reports/tests/test_customer_statistic.py b/backend/timed/reports/tests/test_customer_statistic.py new file mode 100644 index 000000000..e3972cf7b --- /dev/null +++ b/backend/timed/reports/tests/test_customer_statistic.py @@ -0,0 +1,154 @@ +from datetime import timedelta + +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.conftest import setup_customer_and_employment_status +from timed.employment.factories import EmploymentFactory, UserFactory +from timed.projects.factories import CostCenterFactory, TaskAssigneeFactory, TaskFactory +from timed.tracking.factories import ReportFactory + + +@pytest.mark.parametrize( + "is_employed, is_customer_assignee, is_customer, expected, status_code", + [ + (False, True, False, 1, status.HTTP_403_FORBIDDEN), + (False, True, True, 1, status.HTTP_403_FORBIDDEN), + (True, False, False, 3, status.HTTP_200_OK), + (True, True, False, 3, status.HTTP_200_OK), + (True, True, True, 3, status.HTTP_200_OK), + ], +) +def test_customer_statistic_list( + auth_client, + is_employed, + is_customer_assignee, + is_customer, + expected, + status_code, + django_assert_num_queries, 
+): + user = auth_client.user + + assignee, employment = setup_customer_and_employment_status( + user=user, + is_assignee=is_customer_assignee, + is_customer=is_customer, + is_employed=is_employed, + is_external=False, + ) + + # Statistics returns all the customers, not only those + # with reports. So we must get this one into the expected + # list as well + third_customer = assignee.customer if assignee else None + + report = ReportFactory.create(duration=timedelta(hours=1)) + ReportFactory.create(duration=timedelta(hours=2), task=report.task) + report2 = ReportFactory.create(duration=timedelta(hours=4)) + + url = reverse("customer-statistic-list") + with django_assert_num_queries(expected): + result = auth_client.get(url, data={"ordering": "duration"}) + assert result.status_code == status_code + + if status_code == status.HTTP_200_OK: + json = result.json() + expected_data = [ + { + "type": "customer-statistics", + "id": str(report.task.project.customer.id), + "attributes": { + "duration": "03:00:00", + "name": report.task.project.customer.name, + }, + }, + { + "type": "customer-statistics", + "id": str(report2.task.project.customer.id), + "attributes": { + "duration": "04:00:00", + "name": report2.task.project.customer.name, + }, + }, + ] + if third_customer: + expected_data = [ + { + "type": "customer-statistics", + "id": str(third_customer.pk), + "attributes": { + "duration": "00:00:00", + "name": third_customer.name, + }, + } + ] + expected_data + assert json["data"] == expected_data + assert json["meta"]["total-time"] == "07:00:00" + + +@pytest.mark.parametrize( + "filter, expected_result", + [("from_date", 5), ("customer", 3), ("cost_center", 3), ("reviewer", 3)], +) +def test_customer_statistic_filtered(auth_client, filter, expected_result): + user = auth_client.user + setup_customer_and_employment_status( + user=user, + is_assignee=True, + is_customer=True, + is_employed=True, + is_external=False, + ) + + cost_center = CostCenterFactory() + task_z = 
TaskFactory.create(name="Z", cost_center=cost_center) + task_test = TaskFactory.create(name="Test") + reviewer = TaskAssigneeFactory(user=UserFactory(), task=task_test, is_reviewer=True) + + ReportFactory.create(duration=timedelta(hours=1), date="2022-08-05", task=task_test) + ReportFactory.create(duration=timedelta(hours=2), date="2022-08-30", task=task_test) + ReportFactory.create(duration=timedelta(hours=3), date="2022-09-01", task=task_z) + + filter_values = { + "from_date": "2022-08-20", # last two reports + "customer": str(task_test.project.customer.pk), # first two + "cost_center": str(cost_center.pk), # first two + "reviewer": str(reviewer.user.pk), # first two + } + the_filter = {filter: filter_values[filter]} + + url = reverse("customer-statistic-list") + result = auth_client.get( + url, + data={"ordering": "name", **the_filter}, + ) + assert result.status_code == status.HTTP_200_OK + + json = result.json() + + assert json["meta"]["total-time"] == f"{expected_result:02}:00:00" + + +@pytest.mark.parametrize( + "is_employed, expected, status_code", + [ + (True, 5, status.HTTP_200_OK), + (False, 1, status.HTTP_403_FORBIDDEN), + ], +) +def test_customer_statistic_detail( + auth_client, is_employed, expected, status_code, django_assert_num_queries +): + if is_employed: + EmploymentFactory.create(user=auth_client.user) + report = ReportFactory.create(duration=timedelta(hours=1)) + + url = reverse("customer-statistic-detail", args=[report.task.project.customer.id]) + with django_assert_num_queries(expected): + result = auth_client.get(url, data={"ordering": "duration"}) + assert result.status_code == status_code + if status_code == status.HTTP_200_OK: + json = result.json() + assert json["data"]["attributes"]["duration"] == "01:00:00" diff --git a/backend/timed/reports/tests/test_month_statistic.py b/backend/timed/reports/tests/test_month_statistic.py new file mode 100644 index 000000000..2ae274717 --- /dev/null +++ 
b/backend/timed/reports/tests/test_month_statistic.py @@ -0,0 +1,55 @@ +from datetime import date, timedelta + +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.conftest import setup_customer_and_employment_status +from timed.tracking.factories import ReportFactory + + +@pytest.mark.parametrize( + "is_employed, is_customer_assignee, is_customer, expected", + [ + (False, True, False, status.HTTP_403_FORBIDDEN), + (False, True, True, status.HTTP_403_FORBIDDEN), + (True, False, False, status.HTTP_200_OK), + (True, True, False, status.HTTP_200_OK), + (True, True, True, status.HTTP_200_OK), + ], +) +def test_month_statistic_list( + auth_client, is_employed, is_customer_assignee, is_customer, expected +): + user = auth_client.user + setup_customer_and_employment_status( + user=user, + is_assignee=is_customer_assignee, + is_customer=is_customer, + is_employed=is_employed, + is_external=False, + ) + + ReportFactory.create(duration=timedelta(hours=1), date=date(2016, 1, 1)) + ReportFactory.create(duration=timedelta(hours=1), date=date(2015, 12, 4)) + ReportFactory.create(duration=timedelta(hours=2), date=date(2015, 12, 31)) + + url = reverse("month-statistic-list") + result = auth_client.get(url, data={"ordering": "year,month"}) + assert result.status_code == expected + if expected == status.HTTP_200_OK: + json = result.json() + expected_json = [ + { + "type": "month-statistics", + "id": "201512", + "attributes": {"year": 2015, "month": 12, "duration": "03:00:00"}, + }, + { + "type": "month-statistics", + "id": "201601", + "attributes": {"year": 2016, "month": 1, "duration": "01:00:00"}, + }, + ] + assert json["data"] == expected_json + assert json["meta"]["total-time"] == "04:00:00" diff --git a/backend/timed/reports/tests/test_project_statistic.py b/backend/timed/reports/tests/test_project_statistic.py new file mode 100644 index 000000000..4345cece9 --- /dev/null +++ b/backend/timed/reports/tests/test_project_statistic.py @@ 
-0,0 +1,147 @@ +from datetime import timedelta + +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.conftest import setup_customer_and_employment_status +from timed.employment.factories import UserFactory +from timed.projects.factories import CostCenterFactory, TaskAssigneeFactory, TaskFactory +from timed.tracking.factories import ReportFactory + + +@pytest.mark.parametrize( + "is_employed, is_customer_assignee, is_customer, expected, status_code", + [ + (False, True, False, 1, status.HTTP_403_FORBIDDEN), + (False, True, True, 1, status.HTTP_403_FORBIDDEN), + (True, False, False, 4, status.HTTP_200_OK), + (True, True, False, 4, status.HTTP_200_OK), + (True, True, True, 4, status.HTTP_200_OK), + ], +) +def test_project_statistic_list( + auth_client, + is_employed, + is_customer_assignee, + is_customer, + expected, + status_code, + django_assert_num_queries, +): + user = auth_client.user + setup_customer_and_employment_status( + user=user, + is_assignee=is_customer_assignee, + is_customer=is_customer, + is_employed=is_employed, + is_external=False, + ) + report = ReportFactory.create(duration=timedelta(hours=1)) + project = report.task.project + ReportFactory.create(duration=timedelta(hours=2), task=report.task) + report2 = ReportFactory.create(duration=timedelta(hours=4)) + project_2 = report2.task.project + task = TaskFactory(project=report.task.project) + ReportFactory.create(duration=timedelta(hours=2), task=task) + + url = reverse("project-statistic-list") + with django_assert_num_queries(expected): + result = auth_client.get( + url, data={"ordering": "duration", "include": "customer"} + ) + assert result.status_code == status_code + + if status_code == status.HTTP_200_OK: + json = result.json() + expected_json = [ + { + "type": "project-statistics", + "id": str(report2.task.project.id), + "attributes": { + "duration": "04:00:00", + "name": report2.task.project.name, + "amount-offered": 
str(project_2.amount_offered.amount), + "amount-offered-currency": project_2.amount_offered_currency, + "amount-invoiced": str(project_2.amount_invoiced.amount), + "amount-invoiced-currency": project_2.amount_invoiced_currency, + "estimated-time": "00:00:00", + "total-remaining-effort": "00:00:00", + }, + "relationships": { + "customer": { + "data": { + "type": "customers", + "id": str(project_2.customer.id), + } + } + }, + }, + { + "type": "project-statistics", + "id": str(report.task.project.id), + "attributes": { + "duration": "05:00:00", + "name": report.task.project.name, + "amount-offered": str(project.amount_offered.amount), + "amount-offered-currency": project.amount_offered_currency, + "amount-invoiced": str(project.amount_invoiced.amount), + "amount-invoiced-currency": project.amount_invoiced_currency, + "estimated-time": "00:00:00", + "total-remaining-effort": "00:00:00", + }, + "relationships": { + "customer": { + "data": { + "type": "customers", + "id": str(project.customer.id), + } + } + }, + }, + ] + assert json["data"] == expected_json + assert json["meta"]["total-time"] == "09:00:00" + + +@pytest.mark.parametrize( + "filter, expected_result", + [("from_date", 5), ("customer", 3), ("cost_center", 3), ("reviewer", 3)], +) +def test_project_statistic_filtered(auth_client, filter, expected_result): + user = auth_client.user + setup_customer_and_employment_status( + user=user, + is_assignee=True, + is_customer=True, + is_employed=True, + is_external=False, + ) + + cost_center = CostCenterFactory() + task_z = TaskFactory.create(name="Z", cost_center=cost_center) + task_test = TaskFactory.create(name="Test") + reviewer = TaskAssigneeFactory(user=UserFactory(), task=task_test, is_reviewer=True) + + ReportFactory.create(duration=timedelta(hours=1), date="2022-08-05", task=task_test) + ReportFactory.create(duration=timedelta(hours=2), date="2022-08-30", task=task_test) + ReportFactory.create(duration=timedelta(hours=3), date="2022-09-01", task=task_z) + + 
filter_values = { + "from_date": "2022-08-20", # last two reports + "customer": str(task_test.project.customer.pk), # first two + "cost_center": str(cost_center.pk), # last one + "reviewer": str(reviewer.user.pk), # first two + } + the_filter = {filter: filter_values[filter]} + + url = reverse("project-statistic-list") + result = auth_client.get( + url, + data={"ordering": "name", "include": "customer", **the_filter}, + ) + assert result.status_code == status.HTTP_200_OK + + json = result.json() + + assert json["meta"]["total-time"] == f"{expected_result:02}:00:00" diff --git a/backend/timed/reports/tests/test_task_statistic.py b/backend/timed/reports/tests/test_task_statistic.py new file mode 100644 index 000000000..94918e208 --- /dev/null +++ b/backend/timed/reports/tests/test_task_statistic.py @@ -0,0 +1,139 @@ +from datetime import timedelta + +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.conftest import setup_customer_and_employment_status +from timed.employment.factories import UserFactory +from timed.projects.factories import CostCenterFactory, TaskAssigneeFactory, TaskFactory +from timed.tracking.factories import ReportFactory + + +@pytest.mark.parametrize( + "is_employed, is_customer_assignee, is_customer, expected, status_code", + [ + (False, True, False, 1, status.HTTP_403_FORBIDDEN), + (False, True, True, 1, status.HTTP_403_FORBIDDEN), + (True, False, False, 4, status.HTTP_200_OK), + (True, True, False, 4, status.HTTP_200_OK), + (True, True, True, 4, status.HTTP_200_OK), + ], +) +def test_task_statistic_list( + auth_client, + is_employed, + is_customer_assignee, + is_customer, + expected, + status_code, + django_assert_num_queries, +): + user = auth_client.user + setup_customer_and_employment_status( + user=user, + is_assignee=is_customer_assignee, + is_customer=is_customer, + is_employed=is_employed, + is_external=False, + ) + task_z = TaskFactory.create(name="Z") + task_test = 
TaskFactory.create(name="Test") + ReportFactory.create(duration=timedelta(hours=1), task=task_test) + ReportFactory.create(duration=timedelta(hours=2), task=task_test) + ReportFactory.create(duration=timedelta(hours=2), task=task_z) + + url = reverse("task-statistic-list") + with django_assert_num_queries(expected): + result = auth_client.get( + url, + data={ + "ordering": "name", + "include": "project,project.customer", + }, + ) + assert result.status_code == status_code + + if status_code == status.HTTP_200_OK: + json = result.json() + expected_json = [ + { + "type": "task-statistics", + "id": str(task_test.id), + "attributes": { + "duration": "03:00:00", + "name": str(task_test.name), + "most-recent-remaining-effort": None, + "estimated-time": "00:00:00", + }, + "relationships": { + "project": { + "data": {"id": str(task_test.project.id), "type": "projects"} + } + }, + }, + { + "type": "task-statistics", + "id": str(task_z.id), + "attributes": { + "duration": "02:00:00", + "name": str(task_z.name), + "most-recent-remaining-effort": None, + "estimated-time": "00:00:00", + }, + "relationships": { + "project": { + "data": {"id": str(task_z.project.id), "type": "projects"} + } + }, + }, + ] + assert json["data"] == expected_json + assert json["meta"]["total-time"] == "05:00:00" + + +@pytest.mark.parametrize( + "filter, expected_result", + [("from_date", 5), ("customer", 3), ("cost_center", 3), ("reviewer", 3)], +) +def test_task_statistic_filtered( + auth_client, + filter, + expected_result, +): + user = auth_client.user + setup_customer_and_employment_status( + user=user, + is_assignee=True, + is_customer=True, + is_employed=True, + is_external=False, + ) + + cost_center = CostCenterFactory() + task_z = TaskFactory.create(name="Z", cost_center=cost_center) + task_test = TaskFactory.create(name="Test") + reviewer = TaskAssigneeFactory(user=UserFactory(), task=task_test, is_reviewer=True) + + ReportFactory.create(duration=timedelta(hours=1), date="2022-08-05", 
task=task_test) + ReportFactory.create(duration=timedelta(hours=2), date="2022-08-30", task=task_test) + ReportFactory.create(duration=timedelta(hours=3), date="2022-09-01", task=task_z) + + filter_values = { + "from_date": "2022-08-20", # last two reports + "customer": str(task_test.project.customer.pk), # first two + "cost_center": str(cost_center.pk), # first two + "reviewer": str(reviewer.user.pk), # first two + } + the_filter = {filter: filter_values[filter]} + + url = reverse("task-statistic-list") + result = auth_client.get( + url, + data={"ordering": "name", "include": "project,project.customer", **the_filter}, + ) + assert result.status_code == status.HTTP_200_OK + + json = result.json() + + assert json["meta"]["total-time"] == f"{expected_result:02}:00:00" diff --git a/backend/timed/reports/tests/test_user_statistic.py b/backend/timed/reports/tests/test_user_statistic.py new file mode 100644 index 000000000..50ebcc857 --- /dev/null +++ b/backend/timed/reports/tests/test_user_statistic.py @@ -0,0 +1,62 @@ +from datetime import timedelta + +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.conftest import setup_customer_and_employment_status +from timed.tracking.factories import ReportFactory + + +@pytest.mark.parametrize( + "is_employed, is_customer_assignee, is_customer, status_code", + [ + (False, True, False, status.HTTP_403_FORBIDDEN), + (False, True, True, status.HTTP_403_FORBIDDEN), + (True, False, False, status.HTTP_200_OK), + (True, True, False, status.HTTP_200_OK), + (True, True, True, status.HTTP_200_OK), + ], +) +def test_user_statistic_list( + auth_client, is_employed, is_customer_assignee, is_customer, status_code +): + user = auth_client.user + setup_customer_and_employment_status( + user=user, + is_assignee=is_customer_assignee, + is_customer=is_customer, + is_employed=is_employed, + is_external=False, + ) + ReportFactory.create(duration=timedelta(hours=1), user=user) + 
ReportFactory.create(duration=timedelta(hours=2), user=user) + report = ReportFactory.create(duration=timedelta(hours=2)) + + url = reverse("user-statistic-list") + result = auth_client.get(url, data={"ordering": "duration", "include": "user"}) + assert result.status_code == status_code + + if status_code == status.HTTP_200_OK: + json = result.json() + expected_json = [ + { + "type": "user-statistics", + "id": str(report.user.id), + "attributes": {"duration": "02:00:00"}, + "relationships": { + "user": {"data": {"id": str(report.user.id), "type": "users"}} + }, + }, + { + "type": "user-statistics", + "id": str(user.id), + "attributes": {"duration": "03:00:00"}, + "relationships": { + "user": {"data": {"id": str(user.id), "type": "users"}} + }, + }, + ] + assert json["data"] == expected_json + assert len(json["included"]) == 2 + assert json["meta"]["total-time"] == "05:00:00" diff --git a/backend/timed/reports/tests/test_work_report.py b/backend/timed/reports/tests/test_work_report.py new file mode 100644 index 000000000..9830a65c2 --- /dev/null +++ b/backend/timed/reports/tests/test_work_report.py @@ -0,0 +1,189 @@ +import io +from datetime import date +from zipfile import ZipFile + +import ezodf +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.conftest import setup_customer_and_employment_status +from timed.employment.factories import EmploymentFactory +from timed.projects.factories import CustomerFactory, ProjectFactory, TaskFactory +from timed.reports.views import WorkReportViewSet +from timed.tracking.factories import ReportFactory + + +@pytest.mark.freeze_time("2017-09-01") +@pytest.mark.parametrize( + "is_employed, is_customer_assignee, is_customer, expected, status_code", + [ + (False, True, False, 1, status.HTTP_400_BAD_REQUEST), + (False, True, True, 1, status.HTTP_400_BAD_REQUEST), + (True, False, False, 4, status.HTTP_200_OK), + (True, True, False, 4, status.HTTP_200_OK), + (True, True, True, 4, 
status.HTTP_200_OK), + ], +) +def test_work_report_single_project( + auth_client, + is_employed, + is_customer_assignee, + is_customer, + expected, + status_code, + django_assert_num_queries, +): + user = auth_client.user + setup_customer_and_employment_status( + user=user, + is_assignee=is_customer_assignee, + is_customer=is_customer, + is_employed=is_employed, + is_external=False, + ) + # spaces should be replaced with underscore + customer = CustomerFactory.create(name="Customer Name") + # slashes should be dropped from file name + project = ProjectFactory.create(customer=customer, name="Project/") + task = TaskFactory.create(project=project) + ReportFactory.create_batch( + 5, + user=user, + verified_by=user, + task=task, + date=date(2017, 8, 17), + not_billable=True, + ) + ReportFactory.create_batch( + 5, + user=user, + verified_by=user, + task=task, + date=date(2017, 8, 17), + not_billable=False, + ) + + url = reverse("work-report-list") + with django_assert_num_queries(expected): + res = auth_client.get( + url, + data={ + "user": auth_client.user.id, + "from_date": "2017-08-01", + "to_date": "2017-08-31", + "verified": 1, + }, + ) + assert res.status_code == status_code + + if status_code == status.HTTP_200_OK: + assert "1708-20170901-Customer_Name-Project.ods" in (res["Content-Disposition"]) + + content = io.BytesIO(res.content) + doc = ezodf.opendoc(content) + table = doc.sheets[0] + assert table["C5"].value == "2017-08-01" + assert table["C6"].value == "2017-08-31" + assert table["C9"].value == "Test User" + assert table["C10"].value == "Test User" + + +@pytest.mark.freeze_time("2017-09-01") +@pytest.mark.parametrize( + "is_employed, status_code, expected", + [ + (True, status.HTTP_200_OK, 3), + (False, status.HTTP_400_BAD_REQUEST, 1), + ], +) +def test_work_report_multiple_projects( + auth_client, is_employed, status_code, expected, django_assert_num_queries +): + NUM_PROJECTS = 2 + + user = auth_client.user + if is_employed: + 
EmploymentFactory.create(user=user) + customer = CustomerFactory.create(name="Customer") + report_date = date(2017, 8, 17) + for i in range(NUM_PROJECTS): + project = ProjectFactory.create(customer=customer, name="Project{0}".format(i)) + task = TaskFactory.create(project=project) + ReportFactory.create_batch(10, user=user, task=task, date=report_date) + + url = reverse("work-report-list") + with django_assert_num_queries(expected): + res = auth_client.get(url, data={"user": auth_client.user.id, "verified": 0}) + assert res.status_code == status_code + if status_code == status.HTTP_200_OK: + assert "20170901-WorkReports.zip" in (res["Content-Disposition"]) + + content = io.BytesIO(res.content) + with ZipFile(content, "r") as zipfile: + for i in range(NUM_PROJECTS): + ods_content = zipfile.read( + "1708-20170901-Customer-Project{0}.ods".format(i) + ) + doc = ezodf.opendoc(io.BytesIO(ods_content)) + table = doc.sheets[0] + assert table["C5"].value == "2017-08-17" + assert table["C6"].value == "2017-08-17" + + +def test_work_report_empty(auth_client): + url = reverse("work-report-list") + res = auth_client.get(url, data={"user": auth_client.user.id}) + assert res.status_code == status.HTTP_400_BAD_REQUEST + + +@pytest.mark.parametrize( + "customer_name,project_name,expected", + [ + ("Customer Name", "Project/", "1708-20170818-Customer_Name-Project.ods"), + ("Customer-Name", "Project", "1708-20170818-Customer-Name-Project.ods"), + ("Customer$Name", "Project", "1708-20170818-CustomerName-Project.ods"), + ], +) +def test_generate_work_report_name(db, customer_name, project_name, expected): + test_date = date(2017, 8, 18) + view = WorkReportViewSet() + + # spaces should be replaced with underscore + customer = CustomerFactory.create(name=customer_name) + # slashes should be dropped from file name + project = ProjectFactory.create(customer=customer, name=project_name) + + name = view._generate_workreport_name(test_date, test_date, project) + assert name == expected + + 
+@pytest.mark.freeze_time("2017-09-01") +@pytest.mark.parametrize( + "settings_count,given_count,expected_status", + [ + (-1, 9, status.HTTP_200_OK), + (0, 9, status.HTTP_200_OK), + (10, 9, status.HTTP_200_OK), + (9, 10, status.HTTP_400_BAD_REQUEST), + ], +) +def test_work_report_count( + internal_employee_client, settings, settings_count, given_count, expected_status +): + user = internal_employee_client.user + customer = CustomerFactory.create(name="Customer") + report_date = date(2017, 8, 17) + + settings.WORK_REPORTS_EXPORT_MAX_COUNT = settings_count + + project = ProjectFactory.create(customer=customer) + task = TaskFactory.create(project=project) + ReportFactory.create_batch(given_count, user=user, task=task, date=report_date) + + url = reverse("work-report-list") + res = internal_employee_client.get( + url, data={"user": internal_employee_client.user.id, "verified": 0} + ) + + assert res.status_code == expected_status diff --git a/backend/timed/reports/tests/test_year_statistic.py b/backend/timed/reports/tests/test_year_statistic.py new file mode 100644 index 000000000..132b33863 --- /dev/null +++ b/backend/timed/reports/tests/test_year_statistic.py @@ -0,0 +1,78 @@ +from datetime import date, timedelta + +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.conftest import setup_customer_and_employment_status +from timed.employment.factories import EmploymentFactory +from timed.tracking.factories import ReportFactory + + +@pytest.mark.parametrize( + "is_employed, is_customer_assignee, is_customer, expected", + [ + (False, True, False, status.HTTP_403_FORBIDDEN), + (False, True, True, status.HTTP_403_FORBIDDEN), + (True, False, False, status.HTTP_200_OK), + (True, True, False, status.HTTP_200_OK), + (True, True, True, status.HTTP_200_OK), + ], +) +def test_year_statistic_list( + auth_client, is_employed, is_customer_assignee, is_customer, expected +): + user = auth_client.user + setup_customer_and_employment_status( 
+ user=user, + is_assignee=is_customer_assignee, + is_customer=is_customer, + is_employed=is_employed, + is_external=False, + ) + + ReportFactory.create(duration=timedelta(hours=1), date=date(2017, 1, 1)) + ReportFactory.create(duration=timedelta(hours=1), date=date(2015, 2, 28)) + ReportFactory.create(duration=timedelta(hours=1), date=date(2015, 12, 31)) + + url = reverse("year-statistic-list") + result = auth_client.get(url, data={"ordering": "year"}) + assert result.status_code == expected + + if expected == status.HTTP_200_OK: + json = result.json() + expected_json = [ + { + "type": "year-statistics", + "id": "2015", + "attributes": {"year": 2015, "duration": "02:00:00"}, + }, + { + "type": "year-statistics", + "id": "2017", + "attributes": {"year": 2017, "duration": "01:00:00"}, + }, + ] + assert json["data"] == expected_json + assert json["meta"]["total-time"] == "03:00:00" + + +@pytest.mark.parametrize( + "is_employed, expected", + [ + (True, status.HTTP_200_OK), + (False, status.HTTP_403_FORBIDDEN), + ], +) +def test_year_statistic_detail(auth_client, is_employed, expected): + if is_employed: + EmploymentFactory.create(user=auth_client.user) + ReportFactory.create(duration=timedelta(hours=1), date=date(2015, 2, 28)) + ReportFactory.create(duration=timedelta(hours=1), date=date(2015, 12, 31)) + + url = reverse("year-statistic-detail", args=[2015]) + result = auth_client.get(url, data={"ordering": "year"}) + assert result.status_code == expected + if expected == status.HTTP_200_OK: + json = result.json() + assert json["data"]["attributes"]["duration"] == "02:00:00" diff --git a/backend/timed/reports/urls.py b/backend/timed/reports/urls.py new file mode 100644 index 000000000..0ee974648 --- /dev/null +++ b/backend/timed/reports/urls.py @@ -0,0 +1,16 @@ +from django.conf import settings +from rest_framework.routers import SimpleRouter + +from . 
import views + +r = SimpleRouter(trailing_slash=settings.APPEND_SLASH) + +r.register(r"work-reports", views.WorkReportViewSet, "work-report") +r.register(r"year-statistics", views.YearStatisticViewSet, "year-statistic") +r.register(r"month-statistics", views.MonthStatisticViewSet, "month-statistic") +r.register(r"task-statistics", views.TaskStatisticViewSet, "task-statistic") +r.register(r"user-statistics", views.UserStatisticViewSet, "user-statistic") +r.register(r"customer-statistics", views.CustomerStatisticViewSet, "customer-statistic") +r.register(r"project-statistics", views.ProjectStatisticViewSet, "project-statistic") + +urlpatterns = r.urls diff --git a/backend/timed/reports/views.py b/backend/timed/reports/views.py new file mode 100644 index 000000000..d293deb2a --- /dev/null +++ b/backend/timed/reports/views.py @@ -0,0 +1,440 @@ +import re +from collections import defaultdict +from datetime import date +from io import BytesIO +from zipfile import ZipFile + +from django.conf import settings +from django.db.models import F, Q, QuerySet, Sum +from django.db.models.functions import ExtractMonth, ExtractYear +from django.http import HttpResponse +from ezodf import Cell, opendoc +from rest_framework import status +from rest_framework.response import Response +from rest_framework.viewsets import GenericViewSet, ReadOnlyModelViewSet + +from timed.mixins import AggregateQuerysetMixin +from timed.permissions import IsAuthenticated, IsInternal, IsSuperUser +from timed.projects.models import Customer, Project, Task +from timed.reports import serializers +from timed.tracking.filters import ReportFilterSet +from timed.tracking.models import Report +from timed.tracking.views import ReportViewSet + +from . 
import filters + + +class YearStatisticViewSet(AggregateQuerysetMixin, ReadOnlyModelViewSet): + """Year statistics calculates total reported time per year.""" + + serializer_class = serializers.YearStatisticSerializer + filterset_class = ReportFilterSet + ordering_fields = ("year", "duration") + ordering = ("year",) + permission_classes = [ + # internal employees or super users may read all customer statistics + (IsInternal | IsSuperUser) + & IsAuthenticated + ] + + def get_queryset(self): + queryset = Report.objects.all() + queryset = queryset.annotate(year=ExtractYear("date")).values("year") + queryset = queryset.annotate(duration=Sum("duration")) + queryset = queryset.annotate(pk=F("year")) + + return queryset + + +class MonthStatisticViewSet(AggregateQuerysetMixin, ReadOnlyModelViewSet): + """Month statistics calculates total reported time per month.""" + + serializer_class = serializers.MonthStatisticSerializer + filterset_class = ReportFilterSet + ordering_fields = ("year", "month", "duration") + ordering = ("year", "month") + permission_classes = [ + # internal employees or super users may read all customer statistics + (IsInternal | IsSuperUser) + & IsAuthenticated + ] + + def get_queryset(self): + queryset = Report.objects.all() + queryset = queryset.annotate( + year=ExtractYear("date"), month=ExtractMonth("date") + ) + queryset = queryset.values("year", "month") + queryset = queryset.annotate(duration=Sum("duration")) + queryset = queryset.annotate(pk=F("year") * 100 + F("month")) + + return queryset + + +class StatisticQueryset(QuerySet): + def __init__(self, catch_prefixes, *args, base_qs=None, agg_filters=None, **kwargs): + super().__init__(*args, **kwargs) + if base_qs is None: + base_qs = self.model.objects.all() + self._base = base_qs + self._agg_filters = agg_filters + self._catch_prefixes = catch_prefixes + + def filter(self, *args, **kwargs): + if args: # pragma: no cover + # This is a check against programming errors, no need to test + raise 
RuntimeError( + "Unable to detect statistics filter type from Q objects. use " + "filter_aggregate() or filter_base() instead" + ) + my_filters = { + k: v for k, v in kwargs.items() if not k.startswith(self._catch_prefixes) + } + + agg_filters = { + k: v for k, v in kwargs.items() if k.startswith(self._catch_prefixes) + } + + new_qs = self + if my_filters: + new_qs = self.filter_base(**my_filters) + if agg_filters: + new_qs = new_qs.filter_aggregate(**agg_filters) + + return new_qs + + def filter_base(self, *args, **kwargs): + return StatisticQueryset( + model=self.model, + base_qs=self._base.filter(*args, **kwargs), + catch_prefixes=self._catch_prefixes, + agg_filters=self._agg_filters, + ) + + def _clone(self): + return StatisticQueryset( + model=self.model, + base_qs=self._base._clone(), + catch_prefixes=self._catch_prefixes, + agg_filters=self._agg_filters, + ) + + def __str__(self): + return f"StatisticQueryset({str(self._base)} | {str(self._agg_filters)})" + + def __repr__(self): + return f"StatisticQueryset({repr(self._base)} | {repr(self._agg_filters)})" + + def filter_aggregate(self, *args, **kwargs): + filter_q = Q(*args, **kwargs) + + new_filters = self._agg_filters & filter_q if self._agg_filters else filter_q + + return StatisticQueryset( + model=self.model, + base_qs=self._base, + catch_prefixes=self._catch_prefixes, + agg_filters=new_filters, + ) + + +class CustomerStatisticViewSet(AggregateQuerysetMixin, ReadOnlyModelViewSet): + """Customer statistics calculates total reported time per customer.""" + + serializer_class = serializers.CustomerStatisticSerializer + filterset_class = filters.CustomerStatisticFilterSet + ordering_fields = [ + "name", + "duration", + "estimated_time", + "remaining_effort", + ] + ordering = ("name",) + permission_classes = [ + # internal employees or super users may read all customer statistics + (IsInternal | IsSuperUser) + & IsAuthenticated + ] + + def get_queryset(self): + return StatisticQueryset(model=Customer, 
catch_prefixes="projects__") + + +class ProjectStatisticViewSet(AggregateQuerysetMixin, ReadOnlyModelViewSet): + """Project statistics calculates total reported time per project.""" + + serializer_class = serializers.ProjectStatisticSerializer + filterset_class = filters.ProjectStatisticFilterSet + ordering_fields = [ + "name", + "duration", + "estimated_time", + "remaining_effort", + ] + ordering = ("name",) + permission_classes = [ + # internal employees or super users may read all customer statistics + (IsInternal | IsSuperUser) + & IsAuthenticated + ] + + def get_queryset(self): + return StatisticQueryset(model=Project, catch_prefixes="tasks__") + + +class TaskStatisticViewSet(AggregateQuerysetMixin, ReadOnlyModelViewSet): + """Task statistics calculates total reported time per task.""" + + serializer_class = serializers.TaskStatisticSerializer + filterset_class = filters.TaskStatisticFilterSet + ordering_fields = [ + "name", + "duration", + "estimated_time", + "remaining_effort", + ] + ordering = ("name",) + permission_classes = [ + # internal employees or super users may read all customer statistics + (IsInternal | IsSuperUser) + & IsAuthenticated + ] + + def get_queryset(self): + return StatisticQueryset(model=Task, catch_prefixes="tasks__") + + +class UserStatisticViewSet(AggregateQuerysetMixin, ReadOnlyModelViewSet): + """User calculates total reported time per user.""" + + serializer_class = serializers.UserStatisticSerializer + filterset_class = ReportFilterSet + ordering_fields = ("user__username", "duration") + ordering = ("user__username",) + permission_classes = [ + # internal employees or super users may read all customer statistics + (IsInternal | IsSuperUser) + & IsAuthenticated + ] + + def get_queryset(self): + queryset = Report.objects.all() + queryset = queryset.values("user") + queryset = queryset.annotate(duration=Sum("duration")) + queryset = queryset.annotate(pk=F("user")) + + return queryset + + +class WorkReportViewSet(GenericViewSet): + 
""" + Build a ods work report of reports with given filters. + + It creates one work report per project. If given filters results + in several projects work reports will be returned as zip. + """ + + filterset_class = ReportFilterSet + ordering = ReportViewSet.ordering + ordering_fields = ReportViewSet.ordering_fields + + def get_queryset(self): + """Don't show any reports to customers.""" + user = self.request.user + + queryset = Report.objects.select_related( + "user", "task", "task__project", "task__project__customer" + ).prefetch_related( + # need to prefetch verified_by as select_related joins nullable + # foreign key verified_by with INNER JOIN instead of LEFT JOIN + # which leads to an empty result. + # This only happens as user and verified_by points to same table + # and user is not nullable + "verified_by" + ) + + if user.get_active_employment(): + return queryset + return queryset.none() + + def _parse_query_params(self, queryset, request): + """Parse query params by using filterset_class.""" + fltr = self.filterset_class( + request.query_params, queryset=queryset, request=request + ) + form = fltr.form + form.is_valid() + return form.cleaned_data + + def _clean_filename(self, name): + """ + Clean name so it can be used in file paths. + + To accomplish this it will remove all special chars and + replace spaces with underscores + """ + escaped = re.sub(r"[^\w\s-]", "", name) + return re.sub(r"\s+", "_", escaped) + + def _generate_workreport_name(self, from_date, today, project): + """ + Generate workreport name. + + Name is in format: YYMM-YYYYMMDD-$Customer-$Project.ods + whereas YYMM is year and month of from_date and YYYYMMDD + is date when work reports gets created. 
+ """ + return "{0}-{1}-{2}-{3}.ods".format( + from_date.strftime("%y%m"), + today.strftime("%Y%m%d"), + self._clean_filename(project.customer.name), + self._clean_filename(project.name), + ) + + def _create_workreport(self, from_date, to_date, today, project, reports, user): + """ + Create ods workreport. + + :rtype: tuple + :return: tuple where as first value is name and second ezodf document + """ + customer = project.customer + verifiers = sorted( + { + report.verified_by.get_full_name() + for report in reports + if report.verified_by_id is not None + } + ) + + tmpl = settings.WORK_REPORT_PATH + doc = opendoc(tmpl) + table = doc.sheets[0] + tasks = defaultdict(int) + date_style = table["C5"].style_name + # in template cell D3 is empty but styled for float and borders + float_style = table["D3"].style_name + # in template cell D4 is empty but styled for text wrap and borders + text_style = table["D4"].style_name + # in template cell D8 is empty but styled for date with borders + date_style_report = table["D8"].style_name + + # for simplicity insert reports in reverse order + for report in reports: + table.insert_rows(12, 1) + table["A13"] = Cell( + report.date, style_name=date_style_report, value_type="date" + ) + table["B13"] = Cell(report.user.get_full_name(), style_name=text_style) + hours = report.duration.total_seconds() / 60 / 60 + table["C13"] = Cell(hours, style_name=float_style) + table["D13"] = Cell(report.comment, style_name=text_style) + table["E13"] = Cell(report.task.name, style_name=text_style) + if report.not_billable: + table["F13"] = Cell("no", style_name=float_style) + else: + table["F13"] = Cell("yes", style_name=float_style) + + # when from and to date are None find lowest and biggest date + from_date = min(report.date, from_date or date.max) + to_date = max(report.date, to_date or date.min) + + tasks[report.task.name] += hours + + # header values + table["C3"] = Cell(customer and customer.name) + table["C4"] = Cell(project and project.name) 
+ table["C5"] = Cell(from_date, style_name=date_style, value_type="date") + table["C6"] = Cell(to_date, style_name=date_style, value_type="date") + table["C8"] = Cell(today, style_name=date_style, value_type="date") + table["C9"] = Cell(user.get_full_name()) + table["C10"] = Cell(", ".join(verifiers)) + + # reset temporary styles (mainly because of borders) + table["D3"].style_name = "" + table["D4"].style_name = "" + table["D8"].style_name = "" + + pos = 13 + len(reports) + for task_name, task_total_hours in tasks.items(): + table.insert_rows(pos, 1) + table.row_info(pos).style_name = table.row_info(pos - 1).style_name + table[pos, 0] = Cell(task_name, style_name=table[pos - 1, 0].style_name) + table[pos, 2] = Cell( + task_total_hours, style_name=table[pos - 1, 2].style_name + ) + + # calculate location of total hours as insert rows moved it + table[13 + len(reports) + len(tasks), 2].formula = "of:=SUM(C13:C{0})".format( + str(13 + len(reports) - 1) + ) + + # calculate location of total not billable hours as insert rows moved it + table[ + 13 + len(reports) + len(tasks) + 1, 2 + ].formula = 'of:=SUMIF(F13:F{0};"no";C13:C{0})'.format( + str(13 + len(reports) - 1) + ) + + name = self._generate_workreport_name(from_date, today, project) + return (name, doc) + + def list(self, request, *args, **kwargs): + queryset = self.filter_queryset(self.get_queryset()) + + if queryset.count() == 0: + return Response( + "No entries were selected. 
Make sure to clear unneeded filters.", + status=status.HTTP_400_BAD_REQUEST, + ) + + # needed as we add items in reverse order to work report + queryset = queryset.reverse() + + if ( + settings.WORK_REPORTS_EXPORT_MAX_COUNT > 0 + and queryset.count() > settings.WORK_REPORTS_EXPORT_MAX_COUNT + ): + return Response( + "Your request exceeds the maximum allowed entries ({0})".format( + settings.WORK_REPORTS_EXPORT_MAX_COUNT + ), + status=status.HTTP_400_BAD_REQUEST, + ) + + params = self._parse_query_params(queryset, request) + + from_date = params.get("from_date") + to_date = params.get("to_date") + today = date.today() + + reports_by_project = defaultdict(list) + for report in queryset: + reports_by_project[report.task.project].append(report) + + docs = [ + self._create_workreport( + from_date, to_date, today, project, reports, request.user + ) + for project, reports in reports_by_project.items() + ] + + if len(docs) == 1: + name, doc = docs[0] + response = HttpResponse( + doc.tobytes(), + content_type="application/vnd.oasis.opendocument.spreadsheet", + ) + response["Content-Disposition"] = "attachment; filename=%s" % name + return response + + # zip multiple work reports + buf = BytesIO() + with ZipFile(buf, "w") as zf: + for name, doc in docs: + zf.writestr(name, doc.tobytes()) + response = HttpResponse(buf.getvalue(), content_type="application/zip") + response["Content-Disposition"] = "attachment; filename=%s-WorkReports.zip" % ( + today.strftime("%Y%m%d") + ) + return response diff --git a/backend/timed/serializers.py b/backend/timed/serializers.py new file mode 100644 index 000000000..0370b15b9 --- /dev/null +++ b/backend/timed/serializers.py @@ -0,0 +1,31 @@ +from datetime import timedelta + +from django.db.models import Sum +from django.utils.duration import duration_string + + +class TotalTimeRootMetaMixin(object): + duration_field = "duration" + + def get_root_meta(self, resource, many): + """Add total hours over whole result (not just page) to meta.""" + if 
many: + view = self.context["view"] + queryset = view.filter_queryset(view.get_queryset()) + data = queryset.aggregate(total_time=Sum(self.duration_field)) + data["total_time"] = duration_string(data["total_time"] or timedelta(0)) + return data + return {} + + +class AggregateObject(dict): + """ + Wrap dict into an object. + + All values will be accessible through attributes. Note that + keys must be valid python names for this to work. + """ + + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + super().__init__(**kwargs) diff --git a/backend/timed/settings.py b/backend/timed/settings.py new file mode 100644 index 000000000..e94471229 --- /dev/null +++ b/backend/timed/settings.py @@ -0,0 +1,401 @@ +import os +import re + +import environ +import sentry_sdk +from pkg_resources import resource_filename +from sentry_sdk.integrations.django import DjangoIntegration + +env = environ.Env() + +django_root = environ.Path(__file__) - 2 + +ENV_FILE = env.str("DJANGO_ENV_FILE", default=django_root(".env")) +if os.path.exists(ENV_FILE): # pragma: no cover + environ.Env.read_env(ENV_FILE) + +# per default production is enabled for security reasons +# for development create .env file with ENV=dev +ENV = env.str("ENV", "prod") + + +def default(default_dev=env.NOTSET, default_prod=env.NOTSET): + """Environment aware default.""" + return default_prod if ENV == "prod" else default_dev + + +# Database definition + +DATABASES = { + "default": { + "ENGINE": env.str( + "DJANGO_DATABASE_ENGINE", default="django_prometheus.db.backends.postgresql" + ), + "NAME": env.str("DJANGO_DATABASE_NAME", default="timed"), + "USER": env.str("DJANGO_DATABASE_USER", default="timed"), + "PASSWORD": env.str("DJANGO_DATABASE_PASSWORD", default=default("timed")), + "HOST": env.str("DJANGO_DATABASE_HOST", default="localhost"), + "PORT": env.str("DJANGO_DATABASE_PORT", default=""), + } +} + + +# Application definition + +DEBUG = env.bool("DJANGO_DEBUG", default=default(True, False)) +SECRET_KEY = 
env.str("DJANGO_SECRET_KEY", default=default("uuuuuuuuuu")) +ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=default(["*"])) +HOST_PROTOCOL = env.str("DJANGO_HOST_PROTOCOL", default=default("http")) +HOST_DOMAIN = env.str("DJANGO_HOST_DOMAIN", default=default("localhost:4200")) + + +INSTALLED_APPS = [ + "timed.apps.TimedAdminConfig", + "django.contrib.humanize", + "multiselectfield.apps.MultiSelectFieldConfig", + "django.forms", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + "hurricane", + "rest_framework", + "django_filters", + "djmoney.apps.MoneyConfig", + "mozilla_django_oidc", + "django_prometheus.apps.DjangoPrometheusConfig", + "corsheaders", + "nested_inline", + "timed.employment", + "timed.projects", + "timed.tracking", + "timed.reports", + "timed.redmine", + "timed.subscription", + "timed.notifications", +] + +if ENV == "dev": + INSTALLED_APPS.append("django_extensions") + +MIDDLEWARE = [ + "django_prometheus.middleware.PrometheusBeforeMiddleware", + "corsheaders.middleware.CorsMiddleware", + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", + "django_prometheus.middleware.PrometheusAfterMiddleware", + "whitenoise.middleware.WhiteNoiseMiddleware", +] + +ROOT_URLCONF = "timed.urls" + +FORM_RENDERER = "django.forms.renderers.TemplatesSetting" +TEMPLATES = [ + # default: needed for django-admin + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [django_root("timed", "templates")], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + 
"django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ] + }, + }, + # template backend for plain text (no escaping) + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "NAME": "text", + "APP_DIRS": True, + "OPTIONS": { + "autoescape": False, + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ], + }, + }, +] + +WSGI_APPLICATION = "timed.wsgi.application" + + +# Internationalization +# https://docs.djangoproject.com/en/1.9/topics/i18n/ + +LOCALE_PATHS = [django_root("timed", "locale")] + +LANGUAGE_CODE = "en-US" + +TIME_ZONE = env.str("DJANGO_TIME_ZONE", "Europe/Zurich") + +USE_I18N = True +USE_L10N = True + +DATETIME_FORMAT = env.str("DJANGO_DATETIME_FORMAT", "d.m.Y H:i:s") +DATE_FORMAT = env.str("DJANGO_DATE_FORMAT", "d.m.Y") +TIME_FORMAT = env.str("DJANGO_TIME_FORMAT", "H:i:s") + +DECIMAL_SEPARATOR = env.str("DECIMAL_SEPARATOR", ".") + +USE_TZ = True + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/1.9/howto/static-files/ + +STATIC_URL = env.str("STATIC_URL", "/static/") +STATIC_ROOT = env.str("STATIC_ROOT", "/app/static") + +# Cache + +CACHES = { + "default": { + "BACKEND": env.str( + "CACHE_BACKEND", + default="django_prometheus.cache.backends.locmem.LocMemCache", + ), + "LOCATION": env.str("CACHE_LOCATION", ""), + } +} + +# Rest framework definition + +REST_FRAMEWORK = { + "DEFAULT_FILTER_BACKENDS": ( + "django_filters.rest_framework.DjangoFilterBackend", + "rest_framework.filters.SearchFilter", + "rest_framework.filters.OrderingFilter", + ), + "DEFAULT_PARSER_CLASSES": ("rest_framework_json_api.parsers.JSONParser",), + "DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",), + "DEFAULT_AUTHENTICATION_CLASSES": ( + 
"mozilla_django_oidc.contrib.drf.OIDCAuthentication", + ), + "DEFAULT_METADATA_CLASS": "rest_framework_json_api.metadata.JSONAPIMetadata", + "EXCEPTION_HANDLER": "rest_framework_json_api.exceptions.exception_handler", + "DEFAULT_PAGINATION_CLASS": "rest_framework_json_api.pagination.JsonApiPageNumberPagination", + "DEFAULT_RENDERER_CLASSES": ("rest_framework_json_api.renderers.JSONRenderer",), + "TEST_REQUEST_RENDERER_CLASSES": ( + "rest_framework_json_api.renderers.JSONRenderer", + "rest_framework.renderers.JSONRenderer", + "rest_framework.renderers.MultiPartRenderer", + ), + "TEST_REQUEST_DEFAULT_FORMAT": "vnd.api+json", +} + +JSON_API_FORMAT_FIELD_NAMES = "dasherize" +JSON_API_FORMAT_TYPES = "dasherize" +JSON_API_PLURALIZE_TYPES = True + +APPEND_SLASH = False + +# Authentication + +AUTH_USER_MODEL = "employment.User" +AUTHENTICATION_BACKENDS = [ + "django.contrib.auth.backends.ModelBackend", + "timed.authentication.TimedOIDCAuthenticationBackend", +] + +AUTH_PASSWORD_VALIDATORS = [ + { + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator" # noqa + }, + {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, # noqa + {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, # noqa + { + "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator" # noqa + }, +] + +# OIDC + +OIDC_DEFAULT_BASE_URL = env.str( + "DJANGO_OIDC_DEFAULT_BASE_URL", + default="http://timed.local/auth/realms/timed/protocol/openid-connect", +) +OIDC_OP_AUTHORIZATION_ENDPOINT = env.str( + "DJANGO_OIDC_OP_AUTHORIZATION_ENDPOINT", default=f"{OIDC_DEFAULT_BASE_URL}/auth" +) + +OIDC_OP_TOKEN_ENDPOINT = env.str( + "DJANGO_OIDC_OP_TOKEN_ENDPOINT", default=f"{OIDC_DEFAULT_BASE_URL}/token" +) +OIDC_OP_USER_ENDPOINT = env.str( + "DJANGO_OIDC_OP_USER_ENDPOINT", default=f"{OIDC_DEFAULT_BASE_URL}/userinfo" +) +OIDC_OP_JWKS_ENDPOINT = env.str( + "DJANGO_OIDC_OP_JWKS_ENDPOINT", default=f"{OIDC_DEFAULT_BASE_URL}/certs" +) + 
+OIDC_RP_CLIENT_ID = env.str("DJANGO_OIDC_RP_CLIENT_ID", default="timed-public") +OIDC_RP_CLIENT_SECRET = env.str("DJANGO_OIDC_RP_CLIENT_SECRET", default=None) + +OIDC_VERIFY_SSL = env.bool("DJANGO_OIDC_VERIFY_SSL", default=default(False, True)) +OIDC_RP_SIGN_ALGO = env.str("DJANGO_OIDC_RP_SIGN_ALGO", default="RS256") + +OIDC_CREATE_USER = env.bool("DJANGO_OIDC_CREATE_USER", default=True) +OIDC_USERNAME_CLAIM = env.str( + "DJANGO_OIDC_USERNAME_CLAIM", default=default("preferred_username", "sub") +) +OIDC_EMAIL_CLAIM = env.str("DJANGO_OIDC_EMAIL_CLAIM", default="email") +OIDC_FIRSTNAME_CLAIM = env.str("DJANGO_OIDC_FIRSTNAME_CLAIM", default="given_name") +OIDC_LASTNAME_CLAIM = env.str("DJANGO_OIDC_LASTNAME_CLAIM", default="family_name") +# time in seconds +OIDC_BEARER_TOKEN_REVALIDATION_TIME = env.int( + "DJANGO_OIDC_BEARER_TOKEN_REVALIDATION_TIME", default=60 +) + +# introspection endpoint for checking confidential client authentication +OIDC_CHECK_INTROSPECT = env.bool("DJANGO_OIDC_CHECK_INTROSPECT", default=True) +OIDC_OP_INTROSPECT_ENDPOINT = env.str( + "DJANGO_OIDC_OP_INTROSPECT_ENDPOINT", + default=f"{OIDC_DEFAULT_BASE_URL}/token/introspect", +) +OIDC_RP_INTROSPECT_CLIENT_ID = env.str( + "DJANGO_OIDC_RP_INTROSPECT_CLIENT_ID", default="timed-confidential" +) +OIDC_RP_INTROSPECT_CLIENT_SECRET = env.str( + "DJANGO_OIDC_RP_INTROSPECT_CLIENT_SECRET", default=None +) + +# admin page after completing server-side authentication flow +LOGIN_REDIRECT_URL = env.str( + "DJANGO_OIDC_ADMIN_LOGIN_REDIRECT_URL", default=default("http://timed.local/admin/") +) + +# allow / disallow login with local user / password +ALLOW_LOCAL_LOGIN = env.bool("DJANGO_ALLOW_LOCAL_LOGIN", default=True) + +if not ALLOW_LOCAL_LOGIN: # pragma: no cover + AUTHENTICATION_BACKENDS = ["timed.authentication.TimedOIDCAuthenticationBackend"] + +# Email definition + +EMAIL_CONFIG = env.email_url("EMAIL_URL", default="smtp://localhost:25") +vars().update(EMAIL_CONFIG) + +EMAIL_USE_TLS = 
env.bool("DJANGO_EMAIL_USE_TLS", default=True) + +DEFAULT_FROM_EMAIL = env.str( + "DJANGO_DEFAULT_FROM_EMAIL", default("webmaster@localhost") +) + +CUSTOMER_CENTER_EMAIL = env.str( + "DJANGO_CUSTOMER_CENTER_EMAIL", default("admin@localhost") +) + +SERVER_EMAIL = env.str("DJANGO_SERVER_EMAIL", default("root@localhost")) +EMAIL_EXTRA_HEADERS = {"Auto-Submitted": "auto-generated"} + + +def parse_admins(admins): + """ + Parse env admins to django admins. + + Example of DJANGO_ADMINS environment variable: + Test Example ,Test2 + """ + result = [] + for admin in admins: + match = re.search(r"(.+) \<(.+@.+)\>", admin) + if not match: + raise environ.ImproperlyConfigured( + 'In DJANGO_ADMINS admin "{0}" is not in correct ' + '"Firstname Lastname "'.format(admin) + ) + result.append((match.group(1), match.group(2))) + return result + + +ADMINS = parse_admins(env.list("DJANGO_ADMINS", default=[])) + + +# Redmine definition (optional) + +REDMINE_URL = env.str("DJANGO_REDMINE_URL", default="") +REDMINE_APIKEY = env.str("DJANGO_REDMINE_APIKEY", default="") +REDMINE_HTACCESS_USER = env.str("DJANGO_REDMINE_HTACCESS_USER", default="") +REDMINE_HTACCESS_PASSWORD = env.str("DJANGO_REDMINE_HTACCESS_PASSWORD", default="") +REDMINE_SPENTHOURS_FIELD = env.int("DJANGO_REDMINE_SPENTHOURS_FIELD", default=0) +REDMINE_AMOUNT_OFFERED_FIELD = env.int("DJANGO_REDMINE_AMOUNT_OFFERED_FIELD", default=1) +REDMINE_AMOUNT_INVOICED_FIELD = env.int( + "DJANGO_REDMINE_AMOUNT_INVOICED_FIELD", default=2 +) +REDMINE_BUILD_PROJECT = env.str("DJANGO_REDMINE_BUILD_PROJECT", default="build") + + +# Work report definition + +WORK_REPORT_PATH = env.str( + "DJANGO_WORK_REPORT_PATH", + default=resource_filename("timed.reports", "templates/workreport.ots"), +) + +WORK_REPORTS_EXPORT_MAX_COUNT = env.int( + "DJANGO_WORK_REPORTS_EXPORT_MAX_COUNT", default=0 +) + +REPORTS_EXPORT_MAX_COUNT = env.int("DJANGO_REPORTS_EXPORT_MAX_COUNT", default=0) + +# Tracking: Report fields which should be included in email (when report 
was +# changed during verification) +TRACKING_REPORT_VERIFIED_CHANGES = env.list( + "DJANGO_TRACKING_REPORT_VERIFIED_CHANGES", + default=["task", "comment", "not_billable"], +) + +LOGGING = { + "version": 1, + "disable_existing_loggers": False, + "handlers": {"console": {"class": "logging.StreamHandler"}}, + "loggers": { + "django": { + "handlers": ["console"], + "level": env.str("DJANGO_LOG_LEVEL", "INFO"), + }, + "hurricane": { + "handlers": ["console"], + "level": os.getenv("HURRICANE_LOG_LEVEL", "INFO"), + "propagate": False, + }, + }, +} + +# Sentry error tracking +if env.str("DJANGO_SENTRY_DSN", default=""): # pragma: no cover + sentry_sdk.init( + dsn=env.str("DJANGO_SENTRY_DSN", default=""), + integrations=[DjangoIntegration()], + # Set traces_sample_rate to 1.0 to capture 100% + # of transactions for performance monitoring. + traces_sample_rate=env.float("DJANGO_SENTRY_TRACES_SAMPLE_RATE", default=1.0), + # If you wish to associate users to errors (assuming you are using + # django.contrib.auth) you may enable sending PII data. + send_default_pii=env.bool("DJANGO_SENTRY_SEND_DEFAULT_PII", default=True), + # By default the SDK will try to use the SENTRY_RELEASE + # environment variable, or infer a git commit + # SHA as release, however you may want to set + # something more human-readable. 
+ # release="myapp@1.0.0", + ) + +SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") +DATA_UPLOAD_MAX_NUMBER_FIELDS = env.int( + "DJANGO_DATA_UPLOAD_MAX_NUMBER_FIELDS", default=1000 +) +CORS_ALLOWED_ORIGINS = env.list("DJANGO_CORS_ALLOWED_ORIGINS", default=[]) +DEFAULT_AUTO_FIELD = "django.db.models.AutoField" + +BUILD_PROJECTS = env.str("DJANGO_BUILD_PROJECT", default="_BUILD") diff --git a/backend/timed/subscription/__init__.py b/backend/timed/subscription/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/timed/subscription/admin.py b/backend/timed/subscription/admin.py new file mode 100644 index 000000000..63a4adaab --- /dev/null +++ b/backend/timed/subscription/admin.py @@ -0,0 +1,33 @@ +import hashlib + +from django import forms +from django.contrib import admin +from django.utils.translation import gettext_lazy as _ + +from timed.forms import DurationInHoursField + +from . import models + + +class PackageForm(forms.ModelForm): + model = models.Package + duration = DurationInHoursField(label=_("Duration in hours"), required=True) + + +@admin.register(models.Package) +class PackageAdmin(admin.ModelAdmin): + list_display = ["billing_type", "duration", "price"] + form = PackageForm + + +class CustomerPasswordForm(forms.ModelForm): + def save(self, commit=True): + password = self.cleaned_data.get("password") + if password is not None: + self.instance.password = hashlib.md5(password.encode()).hexdigest() + return super().save(commit=commit) + + +class CustomerPasswordInline(admin.StackedInline): + form = CustomerPasswordForm + model = models.CustomerPassword diff --git a/backend/timed/subscription/factories.py b/backend/timed/subscription/factories.py new file mode 100644 index 000000000..b826ef0f7 --- /dev/null +++ b/backend/timed/subscription/factories.py @@ -0,0 +1,21 @@ +from factory import Faker, SubFactory +from factory.django import DjangoModelFactory + +from . 
import models + + +class OrderFactory(DjangoModelFactory): + project = SubFactory("timed.projects.factories.ProjectFactory") + duration = Faker("time_delta") + + class Meta: + model = models.Order + + +class PackageFactory(DjangoModelFactory): + billing_type = SubFactory("timed.projects.factories.BillingTypeFactory") + duration = Faker("time_delta") + price = Faker("pydecimal", positive=True, left_digits=4, right_digits=2) + + class Meta: + model = models.Package diff --git a/backend/timed/subscription/filters.py b/backend/timed/subscription/filters.py new file mode 100644 index 000000000..986dd3954 --- /dev/null +++ b/backend/timed/subscription/filters.py @@ -0,0 +1,26 @@ +from django_filters import FilterSet, NumberFilter + +from timed.projects.models import Project + +from . import models + + +class PackageFilter(FilterSet): + customer = NumberFilter(method="filter_customer") + + def filter_customer(self, queryset, name, value): + billing_types = Project.objects.filter(customer=value).values("billing_type") + return queryset.filter(billing_type__in=billing_types) + + class Meta: + model = models.Package + fields = ("billing_type", "customer") + + +class OrderFilter(FilterSet): + customer = NumberFilter(field_name="project__customer") + acknowledged = NumberFilter(field_name="acknowledged") + + class Meta: + model = models.Order + fields = ("customer", "project", "acknowledged") diff --git a/backend/timed/subscription/migrations/0001_initial.py b/backend/timed/subscription/migrations/0001_initial.py new file mode 100644 index 000000000..acbde0e22 --- /dev/null +++ b/backend/timed/subscription/migrations/0001_initial.py @@ -0,0 +1,368 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.4 on 2017-08-03 14:51 +from __future__ import unicode_literals + +from decimal import Decimal +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import django.utils.timezone +import djmoney.models.fields + + +class 
Migration(migrations.Migration): + initial = True + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ("projects", "0001_initial"), + ] + + operations = [ + migrations.CreateModel( + name="CustomerPassword", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "password", + models.CharField( + blank=True, max_length=128, null=True, verbose_name="password" + ), + ), + ( + "customer", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + to="projects.Customer", + ), + ), + ], + ), + migrations.CreateModel( + name="Order", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("duration", models.DurationField()), + ("ordered", models.DateTimeField(default=django.utils.timezone.now)), + ("acknowledged", models.BooleanField(default=False)), + ( + "confirmedby", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="orders_confirmed", + to=settings.AUTH_USER_MODEL, + ), + ), + ( + "project", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + related_name="subscription", + to="projects.Project", + ), + ), + ], + ), + migrations.CreateModel( + name="Package", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("duration", models.DurationField()), + ( + "price_currency", + djmoney.models.fields.CurrencyField( + choices=[ + ("XUA", "ADB Unit of Account"), + ("AFN", "Afghani"), + ("DZD", "Algerian Dinar"), + ("ARS", "Argentine Peso"), + ("AMD", "Armenian Dram"), + ("AWG", "Aruban Guilder"), + ("AUD", "Australian Dollar"), + ("AZN", "Azerbaijanian Manat"), + ("BSD", "Bahamian Dollar"), + ("BHD", "Bahraini Dinar"), + ("THB", "Baht"), + ("PAB", "Balboa"), + ("BBD", "Barbados Dollar"), + ("BYN", 
"Belarussian Ruble"), + ("BYR", "Belarussian Ruble"), + ("BZD", "Belize Dollar"), + ( + "BMD", + "Bermudian Dollar (customarily known as Bermuda Dollar)", + ), + ("BTN", "Bhutanese ngultrum"), + ("VEF", "Bolivar Fuerte"), + ("BOB", "Boliviano"), + ( + "XBA", + "Bond Markets Units European Composite Unit (EURCO)", + ), + ("BRL", "Brazilian Real"), + ("BND", "Brunei Dollar"), + ("BGN", "Bulgarian Lev"), + ("BIF", "Burundi Franc"), + ("XOF", "CFA Franc BCEAO"), + ("XAF", "CFA franc BEAC"), + ("XPF", "CFP Franc"), + ("CAD", "Canadian Dollar"), + ("CVE", "Cape Verde Escudo"), + ("KYD", "Cayman Islands Dollar"), + ("CLP", "Chilean peso"), + ("XTS", "Codes specifically reserved for testing purposes"), + ("COP", "Colombian peso"), + ("KMF", "Comoro Franc"), + ("CDF", "Congolese franc"), + ("BAM", "Convertible Marks"), + ("NIO", "Cordoba Oro"), + ("CRC", "Costa Rican Colon"), + ("HRK", "Croatian Kuna"), + ("CUP", "Cuban Peso"), + ("CUC", "Cuban convertible peso"), + ("CZK", "Czech Koruna"), + ("GMD", "Dalasi"), + ("DKK", "Danish Krone"), + ("MKD", "Denar"), + ("DJF", "Djibouti Franc"), + ("STD", "Dobra"), + ("DOP", "Dominican Peso"), + ("VND", "Dong"), + ("XCD", "East Caribbean Dollar"), + ("EGP", "Egyptian Pound"), + ("SVC", "El Salvador Colon"), + ("ETB", "Ethiopian Birr"), + ("EUR", "Euro"), + ("XBB", "European Monetary Unit (E.M.U.-6)"), + ("XBD", "European Unit of Account 17(E.U.A.-17)"), + ("XBC", "European Unit of Account 9(E.U.A.-9)"), + ("FKP", "Falkland Islands Pound"), + ("FJD", "Fiji Dollar"), + ("HUF", "Forint"), + ("GHS", "Ghana Cedi"), + ("GIP", "Gibraltar Pound"), + ("XAU", "Gold"), + ("XFO", "Gold-Franc"), + ("PYG", "Guarani"), + ("GNF", "Guinea Franc"), + ("GYD", "Guyana Dollar"), + ("HTG", "Haitian gourde"), + ("HKD", "Hong Kong Dollar"), + ("UAH", "Hryvnia"), + ("ISK", "Iceland Krona"), + ("INR", "Indian Rupee"), + ("IRR", "Iranian Rial"), + ("IQD", "Iraqi Dinar"), + ("IMP", "Isle of Man Pound"), + ("JMD", "Jamaican Dollar"), + ("JOD", "Jordanian 
Dinar"), + ("KES", "Kenyan Shilling"), + ("PGK", "Kina"), + ("LAK", "Kip"), + ("KWD", "Kuwaiti Dinar"), + ("AOA", "Kwanza"), + ("MMK", "Kyat"), + ("GEL", "Lari"), + ("LVL", "Latvian Lats"), + ("LBP", "Lebanese Pound"), + ("ALL", "Lek"), + ("HNL", "Lempira"), + ("SLL", "Leone"), + ("LSL", "Lesotho loti"), + ("LRD", "Liberian Dollar"), + ("LYD", "Libyan Dinar"), + ("SZL", "Lilangeni"), + ("LTL", "Lithuanian Litas"), + ("MGA", "Malagasy Ariary"), + ("MWK", "Malawian Kwacha"), + ("MYR", "Malaysian Ringgit"), + ("TMM", "Manat"), + ("MUR", "Mauritius Rupee"), + ("MZN", "Metical"), + ("MXV", "Mexican Unidad de Inversion (UDI)"), + ("MXN", "Mexican peso"), + ("MDL", "Moldovan Leu"), + ("MAD", "Moroccan Dirham"), + ("BOV", "Mvdol"), + ("NGN", "Naira"), + ("ERN", "Nakfa"), + ("NAD", "Namibian Dollar"), + ("NPR", "Nepalese Rupee"), + ("ANG", "Netherlands Antillian Guilder"), + ("ILS", "New Israeli Sheqel"), + ("RON", "New Leu"), + ("TWD", "New Taiwan Dollar"), + ("NZD", "New Zealand Dollar"), + ("KPW", "North Korean Won"), + ("NOK", "Norwegian Krone"), + ("PEN", "Nuevo Sol"), + ("MRO", "Ouguiya"), + ("TOP", "Paanga"), + ("PKR", "Pakistan Rupee"), + ("XPD", "Palladium"), + ("MOP", "Pataca"), + ("PHP", "Philippine Peso"), + ("XPT", "Platinum"), + ("GBP", "Pound Sterling"), + ("BWP", "Pula"), + ("QAR", "Qatari Rial"), + ("GTQ", "Quetzal"), + ("ZAR", "Rand"), + ("OMR", "Rial Omani"), + ("KHR", "Riel"), + ("MVR", "Rufiyaa"), + ("IDR", "Rupiah"), + ("RUB", "Russian Ruble"), + ("RWF", "Rwanda Franc"), + ("XDR", "SDR"), + ("SHP", "Saint Helena Pound"), + ("SAR", "Saudi Riyal"), + ("RSD", "Serbian Dinar"), + ("SCR", "Seychelles Rupee"), + ("XAG", "Silver"), + ("SGD", "Singapore Dollar"), + ("SBD", "Solomon Islands Dollar"), + ("KGS", "Som"), + ("SOS", "Somali Shilling"), + ("TJS", "Somoni"), + ("SSP", "South Sudanese Pound"), + ("LKR", "Sri Lanka Rupee"), + ("XSU", "Sucre"), + ("SDG", "Sudanese Pound"), + ("SRD", "Surinam Dollar"), + ("SEK", "Swedish Krona"), + ("CHF", "Swiss Franc"), 
+ ("SYP", "Syrian Pound"), + ("BDT", "Taka"), + ("WST", "Tala"), + ("TZS", "Tanzanian Shilling"), + ("KZT", "Tenge"), + ( + "XXX", + "The codes assigned for transactions where no currency is involved", + ), + ("TTD", "Trinidad and Tobago Dollar"), + ("MNT", "Tugrik"), + ("TND", "Tunisian Dinar"), + ("TRY", "Turkish Lira"), + ("TMT", "Turkmenistan New Manat"), + ("TVD", "Tuvalu dollar"), + ("AED", "UAE Dirham"), + ("XFU", "UIC-Franc"), + ("USD", "US Dollar"), + ("USN", "US Dollar (Next day)"), + ("UGX", "Uganda Shilling"), + ("CLF", "Unidad de Fomento"), + ("COU", "Unidad de Valor Real"), + ("UYI", "Uruguay Peso en Unidades Indexadas (URUIURUI)"), + ("UYU", "Uruguayan peso"), + ("UZS", "Uzbekistan Sum"), + ("VUV", "Vatu"), + ("CHE", "WIR Euro"), + ("CHW", "WIR Franc"), + ("KRW", "Won"), + ("YER", "Yemeni Rial"), + ("JPY", "Yen"), + ("CNY", "Yuan Renminbi"), + ("ZMK", "Zambian Kwacha"), + ("ZMW", "Zambian Kwacha"), + ("ZWD", "Zimbabwe Dollar A/06"), + ("ZWN", "Zimbabwe dollar A/08"), + ("ZWL", "Zimbabwe dollar A/09"), + ("PLN", "Zloty"), + ], + default="CHF", + editable=False, + max_length=3, + ), + ), + ( + "price", + djmoney.models.fields.MoneyField( + decimal_places=2, + default=Decimal("0.0"), + default_currency="CHF", + max_digits=7, + ), + ), + ], + ), + migrations.CreateModel( + name="Subscription", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", models.CharField(max_length=255)), + ("archived", models.BooleanField(default=False)), + ], + ), + migrations.CreateModel( + name="SubscriptionProject", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "project", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + to="projects.Project", + ), + ), + ( + "subscription", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + 
to="subscription.Subscription", + ), + ), + ], + ), + migrations.AddField( + model_name="package", + name="subscription", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="subscription.Subscription", + ), + ), + ] diff --git a/backend/timed/subscription/migrations/0002_auto_20170808_1729.py b/backend/timed/subscription/migrations/0002_auto_20170808_1729.py new file mode 100644 index 000000000..fb6afe0c9 --- /dev/null +++ b/backend/timed/subscription/migrations/0002_auto_20170808_1729.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.4 on 2017-08-08 15:29 +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [("subscription", "0001_initial")] + + operations = [ + migrations.AlterField( + model_name="order", + name="project", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="orders", + to="projects.Project", + ), + ) + ] diff --git a/backend/timed/subscription/migrations/0003_auto_20170907_1151.py b/backend/timed/subscription/migrations/0003_auto_20170907_1151.py new file mode 100644 index 000000000..11b35df35 --- /dev/null +++ b/backend/timed/subscription/migrations/0003_auto_20170907_1151.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.4 on 2017-09-07 09:51 +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion + + +SUBSCRIPTION_TO_BILLINGTYPE = { + "DL-Budget": "Engineering Budget", + "SLA Störungsbehebung": "SLA Incident Management", + "Software Maintenance Abonnement": "Software Maintenance", + "SySupport-Premium": "SSA Premium", + "SySupport-Standard": "SSA Standard", +} + + +def migrate_packages(apps, schema_editor): + """Map package subscription to billing type.""" + Package = apps.get_model("subscription", "Package") + BillingType = 
apps.get_model("projects", "BillingType") + + for subscription, billing_type in SUBSCRIPTION_TO_BILLINGTYPE.items(): + pkgs = Package.objects.filter(subscription__name=subscription) + if pkgs.exists(): + billing_type, _ = BillingType.objects.get_or_create(name=billing_type) + pkgs.update(billing_type=billing_type) + + # delete all obsolete packages + Package.objects.filter(billing_type__isnull=True).delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("projects", "0005_auto_20170907_0938"), + ("subscription", "0002_auto_20170808_1729"), + ] + + operations = [ + migrations.RemoveField(model_name="subscriptionproject", name="project"), + migrations.RemoveField(model_name="subscriptionproject", name="subscription"), + migrations.AddField( + model_name="package", + name="billing_type", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="projects.BillingType", + related_name="packages", + ), + preserve_default=False, + ), + migrations.RunPython(migrate_packages), + migrations.RemoveField(model_name="package", name="subscription"), + migrations.DeleteModel(name="Subscription"), + migrations.DeleteModel(name="SubscriptionProject"), + ] diff --git a/backend/timed/subscription/migrations/0004_auto_20200407_2052.py b/backend/timed/subscription/migrations/0004_auto_20200407_2052.py new file mode 100644 index 000000000..a708df253 --- /dev/null +++ b/backend/timed/subscription/migrations/0004_auto_20200407_2052.py @@ -0,0 +1,18 @@ +# Generated by Django 2.2.10 on 2020-04-07 18:52 + +from django.db import migrations +import djmoney.models.fields + + +class Migration(migrations.Migration): + dependencies = [("subscription", "0003_auto_20170907_1151")] + + operations = [ + migrations.AlterField( + model_name="package", + name="price", + field=djmoney.models.fields.MoneyField( + decimal_places=2, default_currency="CHF", max_digits=7 + ), + ) + ] diff --git 
a/backend/timed/subscription/migrations/0005_alter_package_price_currency.py b/backend/timed/subscription/migrations/0005_alter_package_price_currency.py new file mode 100644 index 000000000..211ff0180 --- /dev/null +++ b/backend/timed/subscription/migrations/0005_alter_package_price_currency.py @@ -0,0 +1,333 @@ +# Generated by Django 3.2.13 on 2022-04-14 08:52 + +from django.db import migrations +import djmoney.models.fields + + +class Migration(migrations.Migration): + dependencies = [ + ("subscription", "0004_auto_20200407_2052"), + ] + + operations = [ + migrations.AlterField( + model_name="package", + name="price_currency", + field=djmoney.models.fields.CurrencyField( + choices=[ + ("XUA", "ADB Unit of Account"), + ("AFN", "Afghan Afghani"), + ("AFA", "Afghan Afghani (1927–2002)"), + ("ALL", "Albanian Lek"), + ("ALK", "Albanian Lek (1946–1965)"), + ("DZD", "Algerian Dinar"), + ("ADP", "Andorran Peseta"), + ("AOA", "Angolan Kwanza"), + ("AOK", "Angolan Kwanza (1977–1991)"), + ("AON", "Angolan New Kwanza (1990–2000)"), + ("AOR", "Angolan Readjusted Kwanza (1995–1999)"), + ("ARA", "Argentine Austral"), + ("ARS", "Argentine Peso"), + ("ARM", "Argentine Peso (1881–1970)"), + ("ARP", "Argentine Peso (1983–1985)"), + ("ARL", "Argentine Peso Ley (1970–1983)"), + ("AMD", "Armenian Dram"), + ("AWG", "Aruban Florin"), + ("AUD", "Australian Dollar"), + ("ATS", "Austrian Schilling"), + ("AZN", "Azerbaijani Manat"), + ("AZM", "Azerbaijani Manat (1993–2006)"), + ("BSD", "Bahamian Dollar"), + ("BHD", "Bahraini Dinar"), + ("BDT", "Bangladeshi Taka"), + ("BBD", "Barbadian Dollar"), + ("BYN", "Belarusian Ruble"), + ("BYB", "Belarusian Ruble (1994–1999)"), + ("BYR", "Belarusian Ruble (2000–2016)"), + ("BEF", "Belgian Franc"), + ("BEC", "Belgian Franc (convertible)"), + ("BEL", "Belgian Franc (financial)"), + ("BZD", "Belize Dollar"), + ("BMD", "Bermudan Dollar"), + ("BTN", "Bhutanese Ngultrum"), + ("BOB", "Bolivian Boliviano"), + ("BOL", "Bolivian Boliviano (1863–1963)"), + 
("BOV", "Bolivian Mvdol"), + ("BOP", "Bolivian Peso"), + ("BAM", "Bosnia-Herzegovina Convertible Mark"), + ("BAD", "Bosnia-Herzegovina Dinar (1992–1994)"), + ("BAN", "Bosnia-Herzegovina New Dinar (1994–1997)"), + ("BWP", "Botswanan Pula"), + ("BRC", "Brazilian Cruzado (1986–1989)"), + ("BRZ", "Brazilian Cruzeiro (1942–1967)"), + ("BRE", "Brazilian Cruzeiro (1990–1993)"), + ("BRR", "Brazilian Cruzeiro (1993–1994)"), + ("BRN", "Brazilian New Cruzado (1989–1990)"), + ("BRB", "Brazilian New Cruzeiro (1967–1986)"), + ("BRL", "Brazilian Real"), + ("GBP", "British Pound"), + ("BND", "Brunei Dollar"), + ("BGL", "Bulgarian Hard Lev"), + ("BGN", "Bulgarian Lev"), + ("BGO", "Bulgarian Lev (1879–1952)"), + ("BGM", "Bulgarian Socialist Lev"), + ("BUK", "Burmese Kyat"), + ("BIF", "Burundian Franc"), + ("XPF", "CFP Franc"), + ("KHR", "Cambodian Riel"), + ("CAD", "Canadian Dollar"), + ("CVE", "Cape Verdean Escudo"), + ("KYD", "Cayman Islands Dollar"), + ("XAF", "Central African CFA Franc"), + ("CLE", "Chilean Escudo"), + ("CLP", "Chilean Peso"), + ("CLF", "Chilean Unit of Account (UF)"), + ("CNX", "Chinese People’s Bank Dollar"), + ("CNY", "Chinese Yuan"), + ("CNH", "Chinese Yuan (offshore)"), + ("COP", "Colombian Peso"), + ("COU", "Colombian Real Value Unit"), + ("KMF", "Comorian Franc"), + ("CDF", "Congolese Franc"), + ("CRC", "Costa Rican Colón"), + ("HRD", "Croatian Dinar"), + ("HRK", "Croatian Kuna"), + ("CUC", "Cuban Convertible Peso"), + ("CUP", "Cuban Peso"), + ("CYP", "Cypriot Pound"), + ("CZK", "Czech Koruna"), + ("CSK", "Czechoslovak Hard Koruna"), + ("DKK", "Danish Krone"), + ("DJF", "Djiboutian Franc"), + ("DOP", "Dominican Peso"), + ("NLG", "Dutch Guilder"), + ("XCD", "East Caribbean Dollar"), + ("DDM", "East German Mark"), + ("ECS", "Ecuadorian Sucre"), + ("ECV", "Ecuadorian Unit of Constant Value"), + ("EGP", "Egyptian Pound"), + ("GQE", "Equatorial Guinean Ekwele"), + ("ERN", "Eritrean Nakfa"), + ("EEK", "Estonian Kroon"), + ("ETB", "Ethiopian Birr"), + ("EUR", 
"Euro"), + ("XBA", "European Composite Unit"), + ("XEU", "European Currency Unit"), + ("XBB", "European Monetary Unit"), + ("XBC", "European Unit of Account (XBC)"), + ("XBD", "European Unit of Account (XBD)"), + ("FKP", "Falkland Islands Pound"), + ("FJD", "Fijian Dollar"), + ("FIM", "Finnish Markka"), + ("FRF", "French Franc"), + ("XFO", "French Gold Franc"), + ("XFU", "French UIC-Franc"), + ("GMD", "Gambian Dalasi"), + ("GEK", "Georgian Kupon Larit"), + ("GEL", "Georgian Lari"), + ("DEM", "German Mark"), + ("GHS", "Ghanaian Cedi"), + ("GHC", "Ghanaian Cedi (1979–2007)"), + ("GIP", "Gibraltar Pound"), + ("XAU", "Gold"), + ("GRD", "Greek Drachma"), + ("GTQ", "Guatemalan Quetzal"), + ("GWP", "Guinea-Bissau Peso"), + ("GNF", "Guinean Franc"), + ("GNS", "Guinean Syli"), + ("GYD", "Guyanaese Dollar"), + ("HTG", "Haitian Gourde"), + ("HNL", "Honduran Lempira"), + ("HKD", "Hong Kong Dollar"), + ("HUF", "Hungarian Forint"), + ("IMP", "IMP"), + ("ISK", "Icelandic Króna"), + ("ISJ", "Icelandic Króna (1918–1981)"), + ("INR", "Indian Rupee"), + ("IDR", "Indonesian Rupiah"), + ("IRR", "Iranian Rial"), + ("IQD", "Iraqi Dinar"), + ("IEP", "Irish Pound"), + ("ILS", "Israeli New Shekel"), + ("ILP", "Israeli Pound"), + ("ILR", "Israeli Shekel (1980–1985)"), + ("ITL", "Italian Lira"), + ("JMD", "Jamaican Dollar"), + ("JPY", "Japanese Yen"), + ("JOD", "Jordanian Dinar"), + ("KZT", "Kazakhstani Tenge"), + ("KES", "Kenyan Shilling"), + ("KWD", "Kuwaiti Dinar"), + ("KGS", "Kyrgystani Som"), + ("LAK", "Laotian Kip"), + ("LVL", "Latvian Lats"), + ("LVR", "Latvian Ruble"), + ("LBP", "Lebanese Pound"), + ("LSL", "Lesotho Loti"), + ("LRD", "Liberian Dollar"), + ("LYD", "Libyan Dinar"), + ("LTL", "Lithuanian Litas"), + ("LTT", "Lithuanian Talonas"), + ("LUL", "Luxembourg Financial Franc"), + ("LUC", "Luxembourgian Convertible Franc"), + ("LUF", "Luxembourgian Franc"), + ("MOP", "Macanese Pataca"), + ("MKD", "Macedonian Denar"), + ("MKN", "Macedonian Denar (1992–1993)"), + ("MGA", "Malagasy 
Ariary"), + ("MGF", "Malagasy Franc"), + ("MWK", "Malawian Kwacha"), + ("MYR", "Malaysian Ringgit"), + ("MVR", "Maldivian Rufiyaa"), + ("MVP", "Maldivian Rupee (1947–1981)"), + ("MLF", "Malian Franc"), + ("MTL", "Maltese Lira"), + ("MTP", "Maltese Pound"), + ("MRU", "Mauritanian Ouguiya"), + ("MRO", "Mauritanian Ouguiya (1973–2017)"), + ("MUR", "Mauritian Rupee"), + ("MXV", "Mexican Investment Unit"), + ("MXN", "Mexican Peso"), + ("MXP", "Mexican Silver Peso (1861–1992)"), + ("MDC", "Moldovan Cupon"), + ("MDL", "Moldovan Leu"), + ("MCF", "Monegasque Franc"), + ("MNT", "Mongolian Tugrik"), + ("MAD", "Moroccan Dirham"), + ("MAF", "Moroccan Franc"), + ("MZE", "Mozambican Escudo"), + ("MZN", "Mozambican Metical"), + ("MZM", "Mozambican Metical (1980–2006)"), + ("MMK", "Myanmar Kyat"), + ("NAD", "Namibian Dollar"), + ("NPR", "Nepalese Rupee"), + ("ANG", "Netherlands Antillean Guilder"), + ("TWD", "New Taiwan Dollar"), + ("NZD", "New Zealand Dollar"), + ("NIO", "Nicaraguan Córdoba"), + ("NIC", "Nicaraguan Córdoba (1988–1991)"), + ("NGN", "Nigerian Naira"), + ("KPW", "North Korean Won"), + ("NOK", "Norwegian Krone"), + ("OMR", "Omani Rial"), + ("PKR", "Pakistani Rupee"), + ("XPD", "Palladium"), + ("PAB", "Panamanian Balboa"), + ("PGK", "Papua New Guinean Kina"), + ("PYG", "Paraguayan Guarani"), + ("PEI", "Peruvian Inti"), + ("PEN", "Peruvian Sol"), + ("PES", "Peruvian Sol (1863–1965)"), + ("PHP", "Philippine Peso"), + ("XPT", "Platinum"), + ("PLN", "Polish Zloty"), + ("PLZ", "Polish Zloty (1950–1995)"), + ("PTE", "Portuguese Escudo"), + ("GWE", "Portuguese Guinea Escudo"), + ("QAR", "Qatari Rial"), + ("XRE", "RINET Funds"), + ("RHD", "Rhodesian Dollar"), + ("RON", "Romanian Leu"), + ("ROL", "Romanian Leu (1952–2006)"), + ("RUB", "Russian Ruble"), + ("RUR", "Russian Ruble (1991–1998)"), + ("RWF", "Rwandan Franc"), + ("SVC", "Salvadoran Colón"), + ("WST", "Samoan Tala"), + ("SAR", "Saudi Riyal"), + ("RSD", "Serbian Dinar"), + ("CSD", "Serbian Dinar (2002–2006)"), + ("SCR", 
"Seychellois Rupee"), + ("SLL", "Sierra Leonean Leone"), + ("XAG", "Silver"), + ("SGD", "Singapore Dollar"), + ("SKK", "Slovak Koruna"), + ("SIT", "Slovenian Tolar"), + ("SBD", "Solomon Islands Dollar"), + ("SOS", "Somali Shilling"), + ("ZAR", "South African Rand"), + ("ZAL", "South African Rand (financial)"), + ("KRH", "South Korean Hwan (1953–1962)"), + ("KRW", "South Korean Won"), + ("KRO", "South Korean Won (1945–1953)"), + ("SSP", "South Sudanese Pound"), + ("SUR", "Soviet Rouble"), + ("ESP", "Spanish Peseta"), + ("ESA", "Spanish Peseta (A account)"), + ("ESB", "Spanish Peseta (convertible account)"), + ("XDR", "Special Drawing Rights"), + ("LKR", "Sri Lankan Rupee"), + ("SHP", "St. Helena Pound"), + ("XSU", "Sucre"), + ("SDD", "Sudanese Dinar (1992–2007)"), + ("SDG", "Sudanese Pound"), + ("SDP", "Sudanese Pound (1957–1998)"), + ("SRD", "Surinamese Dollar"), + ("SRG", "Surinamese Guilder"), + ("SZL", "Swazi Lilangeni"), + ("SEK", "Swedish Krona"), + ("CHF", "Swiss Franc"), + ("SYP", "Syrian Pound"), + ("STN", "São Tomé & Príncipe Dobra"), + ("STD", "São Tomé & Príncipe Dobra (1977–2017)"), + ("TVD", "TVD"), + ("TJR", "Tajikistani Ruble"), + ("TJS", "Tajikistani Somoni"), + ("TZS", "Tanzanian Shilling"), + ("XTS", "Testing Currency Code"), + ("THB", "Thai Baht"), + ( + "XXX", + "The codes assigned for transactions where no currency is involved", + ), + ("TPE", "Timorese Escudo"), + ("TOP", "Tongan Paʻanga"), + ("TTD", "Trinidad & Tobago Dollar"), + ("TND", "Tunisian Dinar"), + ("TRY", "Turkish Lira"), + ("TRL", "Turkish Lira (1922–2005)"), + ("TMT", "Turkmenistani Manat"), + ("TMM", "Turkmenistani Manat (1993–2009)"), + ("USD", "US Dollar"), + ("USN", "US Dollar (Next day)"), + ("USS", "US Dollar (Same day)"), + ("UGX", "Ugandan Shilling"), + ("UGS", "Ugandan Shilling (1966–1987)"), + ("UAH", "Ukrainian Hryvnia"), + ("UAK", "Ukrainian Karbovanets"), + ("AED", "United Arab Emirates Dirham"), + ("UYW", "Uruguayan Nominal Wage Index Unit"), + ("UYU", "Uruguayan 
Peso"), + ("UYP", "Uruguayan Peso (1975–1993)"), + ("UYI", "Uruguayan Peso (Indexed Units)"), + ("UZS", "Uzbekistani Som"), + ("VUV", "Vanuatu Vatu"), + ("VES", "Venezuelan Bolívar"), + ("VEB", "Venezuelan Bolívar (1871–2008)"), + ("VEF", "Venezuelan Bolívar (2008–2018)"), + ("VND", "Vietnamese Dong"), + ("VNN", "Vietnamese Dong (1978–1985)"), + ("CHE", "WIR Euro"), + ("CHW", "WIR Franc"), + ("XOF", "West African CFA Franc"), + ("YDD", "Yemeni Dinar"), + ("YER", "Yemeni Rial"), + ("YUN", "Yugoslavian Convertible Dinar (1990–1992)"), + ("YUD", "Yugoslavian Hard Dinar (1966–1990)"), + ("YUM", "Yugoslavian New Dinar (1994–2002)"), + ("YUR", "Yugoslavian Reformed Dinar (1992–1993)"), + ("ZWN", "ZWN"), + ("ZRN", "Zairean New Zaire (1993–1998)"), + ("ZRZ", "Zairean Zaire (1971–1993)"), + ("ZMW", "Zambian Kwacha"), + ("ZMK", "Zambian Kwacha (1968–2012)"), + ("ZWD", "Zimbabwean Dollar (1980–2008)"), + ("ZWR", "Zimbabwean Dollar (2008)"), + ("ZWL", "Zimbabwean Dollar (2009)"), + ], + default="CHF", + editable=False, + max_length=3, + ), + ), + ] diff --git a/backend/timed/subscription/migrations/0006_alter_package_price_currency.py b/backend/timed/subscription/migrations/0006_alter_package_price_currency.py new file mode 100644 index 000000000..f0aa466c8 --- /dev/null +++ b/backend/timed/subscription/migrations/0006_alter_package_price_currency.py @@ -0,0 +1,333 @@ +# Generated by Django 3.2.19 on 2023-05-08 13:49 + +from django.db import migrations +import djmoney.models.fields + + +class Migration(migrations.Migration): + dependencies = [ + ("subscription", "0005_alter_package_price_currency"), + ] + + operations = [ + migrations.AlterField( + model_name="package", + name="price_currency", + field=djmoney.models.fields.CurrencyField( + choices=[ + ("XUA", "ADB Unit of Account"), + ("AFN", "Afghan Afghani"), + ("AFA", "Afghan Afghani (1927–2002)"), + ("ALL", "Albanian Lek"), + ("ALK", "Albanian Lek (1946–1965)"), + ("DZD", "Algerian Dinar"), + ("ADP", "Andorran Peseta"), + 
("AOA", "Angolan Kwanza"), + ("AOK", "Angolan Kwanza (1977–1991)"), + ("AON", "Angolan New Kwanza (1990–2000)"), + ("AOR", "Angolan Readjusted Kwanza (1995–1999)"), + ("ARA", "Argentine Austral"), + ("ARS", "Argentine Peso"), + ("ARM", "Argentine Peso (1881–1970)"), + ("ARP", "Argentine Peso (1983–1985)"), + ("ARL", "Argentine Peso Ley (1970–1983)"), + ("AMD", "Armenian Dram"), + ("AWG", "Aruban Florin"), + ("AUD", "Australian Dollar"), + ("ATS", "Austrian Schilling"), + ("AZN", "Azerbaijani Manat"), + ("AZM", "Azerbaijani Manat (1993–2006)"), + ("BSD", "Bahamian Dollar"), + ("BHD", "Bahraini Dinar"), + ("BDT", "Bangladeshi Taka"), + ("BBD", "Barbadian Dollar"), + ("BYN", "Belarusian Ruble"), + ("BYB", "Belarusian Ruble (1994–1999)"), + ("BYR", "Belarusian Ruble (2000–2016)"), + ("BEF", "Belgian Franc"), + ("BEC", "Belgian Franc (convertible)"), + ("BEL", "Belgian Franc (financial)"), + ("BZD", "Belize Dollar"), + ("BMD", "Bermudan Dollar"), + ("BTN", "Bhutanese Ngultrum"), + ("BOB", "Bolivian Boliviano"), + ("BOL", "Bolivian Boliviano (1863–1963)"), + ("BOV", "Bolivian Mvdol"), + ("BOP", "Bolivian Peso"), + ("BAM", "Bosnia-Herzegovina Convertible Mark"), + ("BAD", "Bosnia-Herzegovina Dinar (1992–1994)"), + ("BAN", "Bosnia-Herzegovina New Dinar (1994–1997)"), + ("BWP", "Botswanan Pula"), + ("BRC", "Brazilian Cruzado (1986–1989)"), + ("BRZ", "Brazilian Cruzeiro (1942–1967)"), + ("BRE", "Brazilian Cruzeiro (1990–1993)"), + ("BRR", "Brazilian Cruzeiro (1993–1994)"), + ("BRN", "Brazilian New Cruzado (1989–1990)"), + ("BRB", "Brazilian New Cruzeiro (1967–1986)"), + ("BRL", "Brazilian Real"), + ("GBP", "British Pound"), + ("BND", "Brunei Dollar"), + ("BGL", "Bulgarian Hard Lev"), + ("BGN", "Bulgarian Lev"), + ("BGO", "Bulgarian Lev (1879–1952)"), + ("BGM", "Bulgarian Socialist Lev"), + ("BUK", "Burmese Kyat"), + ("BIF", "Burundian Franc"), + ("XPF", "CFP Franc"), + ("KHR", "Cambodian Riel"), + ("CAD", "Canadian Dollar"), + ("CVE", "Cape Verdean Escudo"), + ("KYD", 
"Cayman Islands Dollar"), + ("XAF", "Central African CFA Franc"), + ("CLE", "Chilean Escudo"), + ("CLP", "Chilean Peso"), + ("CLF", "Chilean Unit of Account (UF)"), + ("CNX", "Chinese People’s Bank Dollar"), + ("CNY", "Chinese Yuan"), + ("CNH", "Chinese Yuan (offshore)"), + ("COP", "Colombian Peso"), + ("COU", "Colombian Real Value Unit"), + ("KMF", "Comorian Franc"), + ("CDF", "Congolese Franc"), + ("CRC", "Costa Rican Colón"), + ("HRD", "Croatian Dinar"), + ("HRK", "Croatian Kuna"), + ("CUC", "Cuban Convertible Peso"), + ("CUP", "Cuban Peso"), + ("CYP", "Cypriot Pound"), + ("CZK", "Czech Koruna"), + ("CSK", "Czechoslovak Hard Koruna"), + ("DKK", "Danish Krone"), + ("DJF", "Djiboutian Franc"), + ("DOP", "Dominican Peso"), + ("NLG", "Dutch Guilder"), + ("XCD", "East Caribbean Dollar"), + ("DDM", "East German Mark"), + ("ECS", "Ecuadorian Sucre"), + ("ECV", "Ecuadorian Unit of Constant Value"), + ("EGP", "Egyptian Pound"), + ("GQE", "Equatorial Guinean Ekwele"), + ("ERN", "Eritrean Nakfa"), + ("EEK", "Estonian Kroon"), + ("ETB", "Ethiopian Birr"), + ("EUR", "Euro"), + ("XBA", "European Composite Unit"), + ("XEU", "European Currency Unit"), + ("XBB", "European Monetary Unit"), + ("XBC", "European Unit of Account (XBC)"), + ("XBD", "European Unit of Account (XBD)"), + ("FKP", "Falkland Islands Pound"), + ("FJD", "Fijian Dollar"), + ("FIM", "Finnish Markka"), + ("FRF", "French Franc"), + ("XFO", "French Gold Franc"), + ("XFU", "French UIC-Franc"), + ("GMD", "Gambian Dalasi"), + ("GEK", "Georgian Kupon Larit"), + ("GEL", "Georgian Lari"), + ("DEM", "German Mark"), + ("GHS", "Ghanaian Cedi"), + ("GHC", "Ghanaian Cedi (1979–2007)"), + ("GIP", "Gibraltar Pound"), + ("XAU", "Gold"), + ("GRD", "Greek Drachma"), + ("GTQ", "Guatemalan Quetzal"), + ("GWP", "Guinea-Bissau Peso"), + ("GNF", "Guinean Franc"), + ("GNS", "Guinean Syli"), + ("GYD", "Guyanaese Dollar"), + ("HTG", "Haitian Gourde"), + ("HNL", "Honduran Lempira"), + ("HKD", "Hong Kong Dollar"), + ("HUF", "Hungarian 
Forint"), + ("IMP", "IMP"), + ("ISK", "Icelandic Króna"), + ("ISJ", "Icelandic Króna (1918–1981)"), + ("INR", "Indian Rupee"), + ("IDR", "Indonesian Rupiah"), + ("IRR", "Iranian Rial"), + ("IQD", "Iraqi Dinar"), + ("IEP", "Irish Pound"), + ("ILS", "Israeli New Shekel"), + ("ILP", "Israeli Pound"), + ("ILR", "Israeli Shekel (1980–1985)"), + ("ITL", "Italian Lira"), + ("JMD", "Jamaican Dollar"), + ("JPY", "Japanese Yen"), + ("JOD", "Jordanian Dinar"), + ("KZT", "Kazakhstani Tenge"), + ("KES", "Kenyan Shilling"), + ("KWD", "Kuwaiti Dinar"), + ("KGS", "Kyrgystani Som"), + ("LAK", "Laotian Kip"), + ("LVL", "Latvian Lats"), + ("LVR", "Latvian Ruble"), + ("LBP", "Lebanese Pound"), + ("LSL", "Lesotho Loti"), + ("LRD", "Liberian Dollar"), + ("LYD", "Libyan Dinar"), + ("LTL", "Lithuanian Litas"), + ("LTT", "Lithuanian Talonas"), + ("LUL", "Luxembourg Financial Franc"), + ("LUC", "Luxembourgian Convertible Franc"), + ("LUF", "Luxembourgian Franc"), + ("MOP", "Macanese Pataca"), + ("MKD", "Macedonian Denar"), + ("MKN", "Macedonian Denar (1992–1993)"), + ("MGA", "Malagasy Ariary"), + ("MGF", "Malagasy Franc"), + ("MWK", "Malawian Kwacha"), + ("MYR", "Malaysian Ringgit"), + ("MVR", "Maldivian Rufiyaa"), + ("MVP", "Maldivian Rupee (1947–1981)"), + ("MLF", "Malian Franc"), + ("MTL", "Maltese Lira"), + ("MTP", "Maltese Pound"), + ("MRU", "Mauritanian Ouguiya"), + ("MRO", "Mauritanian Ouguiya (1973–2017)"), + ("MUR", "Mauritian Rupee"), + ("MXV", "Mexican Investment Unit"), + ("MXN", "Mexican Peso"), + ("MXP", "Mexican Silver Peso (1861–1992)"), + ("MDC", "Moldovan Cupon"), + ("MDL", "Moldovan Leu"), + ("MCF", "Monegasque Franc"), + ("MNT", "Mongolian Tugrik"), + ("MAD", "Moroccan Dirham"), + ("MAF", "Moroccan Franc"), + ("MZE", "Mozambican Escudo"), + ("MZN", "Mozambican Metical"), + ("MZM", "Mozambican Metical (1980–2006)"), + ("MMK", "Myanmar Kyat"), + ("NAD", "Namibian Dollar"), + ("NPR", "Nepalese Rupee"), + ("ANG", "Netherlands Antillean Guilder"), + ("TWD", "New Taiwan 
Dollar"), + ("NZD", "New Zealand Dollar"), + ("NIO", "Nicaraguan Córdoba"), + ("NIC", "Nicaraguan Córdoba (1988–1991)"), + ("NGN", "Nigerian Naira"), + ("KPW", "North Korean Won"), + ("NOK", "Norwegian Krone"), + ("OMR", "Omani Rial"), + ("PKR", "Pakistani Rupee"), + ("XPD", "Palladium"), + ("PAB", "Panamanian Balboa"), + ("PGK", "Papua New Guinean Kina"), + ("PYG", "Paraguayan Guarani"), + ("PEI", "Peruvian Inti"), + ("PEN", "Peruvian Sol"), + ("PES", "Peruvian Sol (1863–1965)"), + ("PHP", "Philippine Peso"), + ("XPT", "Platinum"), + ("PLN", "Polish Zloty"), + ("PLZ", "Polish Zloty (1950–1995)"), + ("PTE", "Portuguese Escudo"), + ("GWE", "Portuguese Guinea Escudo"), + ("QAR", "Qatari Riyal"), + ("XRE", "RINET Funds"), + ("RHD", "Rhodesian Dollar"), + ("RON", "Romanian Leu"), + ("ROL", "Romanian Leu (1952–2006)"), + ("RUB", "Russian Ruble"), + ("RUR", "Russian Ruble (1991–1998)"), + ("RWF", "Rwandan Franc"), + ("SVC", "Salvadoran Colón"), + ("WST", "Samoan Tala"), + ("SAR", "Saudi Riyal"), + ("RSD", "Serbian Dinar"), + ("CSD", "Serbian Dinar (2002–2006)"), + ("SCR", "Seychellois Rupee"), + ("SLL", "Sierra Leonean Leone (1964—2022)"), + ("XAG", "Silver"), + ("SGD", "Singapore Dollar"), + ("SKK", "Slovak Koruna"), + ("SIT", "Slovenian Tolar"), + ("SBD", "Solomon Islands Dollar"), + ("SOS", "Somali Shilling"), + ("ZAR", "South African Rand"), + ("ZAL", "South African Rand (financial)"), + ("KRH", "South Korean Hwan (1953–1962)"), + ("KRW", "South Korean Won"), + ("KRO", "South Korean Won (1945–1953)"), + ("SSP", "South Sudanese Pound"), + ("SUR", "Soviet Rouble"), + ("ESP", "Spanish Peseta"), + ("ESA", "Spanish Peseta (A account)"), + ("ESB", "Spanish Peseta (convertible account)"), + ("XDR", "Special Drawing Rights"), + ("LKR", "Sri Lankan Rupee"), + ("SHP", "St. 
Helena Pound"), + ("XSU", "Sucre"), + ("SDD", "Sudanese Dinar (1992–2007)"), + ("SDG", "Sudanese Pound"), + ("SDP", "Sudanese Pound (1957–1998)"), + ("SRD", "Surinamese Dollar"), + ("SRG", "Surinamese Guilder"), + ("SZL", "Swazi Lilangeni"), + ("SEK", "Swedish Krona"), + ("CHF", "Swiss Franc"), + ("SYP", "Syrian Pound"), + ("STN", "São Tomé & Príncipe Dobra"), + ("STD", "São Tomé & Príncipe Dobra (1977–2017)"), + ("TVD", "TVD"), + ("TJR", "Tajikistani Ruble"), + ("TJS", "Tajikistani Somoni"), + ("TZS", "Tanzanian Shilling"), + ("XTS", "Testing Currency Code"), + ("THB", "Thai Baht"), + ( + "XXX", + "The codes assigned for transactions where no currency is involved", + ), + ("TPE", "Timorese Escudo"), + ("TOP", "Tongan Paʻanga"), + ("TTD", "Trinidad & Tobago Dollar"), + ("TND", "Tunisian Dinar"), + ("TRY", "Turkish Lira"), + ("TRL", "Turkish Lira (1922–2005)"), + ("TMT", "Turkmenistani Manat"), + ("TMM", "Turkmenistani Manat (1993–2009)"), + ("USD", "US Dollar"), + ("USN", "US Dollar (Next day)"), + ("USS", "US Dollar (Same day)"), + ("UGX", "Ugandan Shilling"), + ("UGS", "Ugandan Shilling (1966–1987)"), + ("UAH", "Ukrainian Hryvnia"), + ("UAK", "Ukrainian Karbovanets"), + ("AED", "United Arab Emirates Dirham"), + ("UYW", "Uruguayan Nominal Wage Index Unit"), + ("UYU", "Uruguayan Peso"), + ("UYP", "Uruguayan Peso (1975–1993)"), + ("UYI", "Uruguayan Peso (Indexed Units)"), + ("UZS", "Uzbekistani Som"), + ("VUV", "Vanuatu Vatu"), + ("VES", "Venezuelan Bolívar"), + ("VEB", "Venezuelan Bolívar (1871–2008)"), + ("VEF", "Venezuelan Bolívar (2008–2018)"), + ("VND", "Vietnamese Dong"), + ("VNN", "Vietnamese Dong (1978–1985)"), + ("CHE", "WIR Euro"), + ("CHW", "WIR Franc"), + ("XOF", "West African CFA Franc"), + ("YDD", "Yemeni Dinar"), + ("YER", "Yemeni Rial"), + ("YUN", "Yugoslavian Convertible Dinar (1990–1992)"), + ("YUD", "Yugoslavian Hard Dinar (1966–1990)"), + ("YUM", "Yugoslavian New Dinar (1994–2002)"), + ("YUR", "Yugoslavian Reformed Dinar (1992–1993)"), + ("ZWN", 
"ZWN"), + ("ZRN", "Zairean New Zaire (1993–1998)"), + ("ZRZ", "Zairean Zaire (1971–1993)"), + ("ZMW", "Zambian Kwacha"), + ("ZMK", "Zambian Kwacha (1968–2012)"), + ("ZWD", "Zimbabwean Dollar (1980–2008)"), + ("ZWR", "Zimbabwean Dollar (2008)"), + ("ZWL", "Zimbabwean Dollar (2009)"), + ], + default="CHF", + editable=False, + max_length=3, + ), + ), + ] diff --git a/backend/timed/subscription/migrations/__init__.py b/backend/timed/subscription/migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/timed/subscription/models.py b/backend/timed/subscription/models.py new file mode 100644 index 000000000..6c40bcc1a --- /dev/null +++ b/backend/timed/subscription/models.py @@ -0,0 +1,54 @@ +from django.conf import settings +from django.db import models +from django.utils import timezone +from django.utils.translation import gettext_lazy as _ +from djmoney.models.fields import MoneyField + + +class Package(models.Model): + """Representing a subscription package.""" + + billing_type = models.ForeignKey( + "projects.BillingType", + on_delete=models.CASCADE, + null=True, + related_name="packages", + ) + """ + This field has been added later so there might be old entries with null + hence null=True. However blank=True is not set as it is required to set + for new packages. 
+ """ + + duration = models.DurationField() + price = MoneyField(max_digits=7, decimal_places=2, default_currency="CHF") + + +class Order(models.Model): + """Order of customer for specific amount of hours.""" + + project = models.ForeignKey( + "projects.Project", on_delete=models.CASCADE, related_name="orders" + ) + duration = models.DurationField() + ordered = models.DateTimeField(default=timezone.now) + acknowledged = models.BooleanField(default=False) + confirmedby = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="orders_confirmed", + ) + + +class CustomerPassword(models.Model): + """ + Password per customer used for login into SySupport portal. + + Password are only hashed with md5. This model will be obsolete + once customer center will go live. + """ + + customer = models.OneToOneField("projects.Customer", on_delete=models.CASCADE) + password = models.CharField(_("password"), max_length=128, null=True, blank=True) diff --git a/backend/timed/subscription/serializers.py b/backend/timed/subscription/serializers.py new file mode 100644 index 000000000..1138e8cb7 --- /dev/null +++ b/backend/timed/subscription/serializers.py @@ -0,0 +1,86 @@ +from datetime import timedelta + +from django.db.models import Sum +from django.utils.duration import duration_string +from rest_framework_json_api.serializers import ( + CharField, + ModelSerializer, + SerializerMethodField, +) + +from timed.projects.models import Project +from timed.tracking.models import Report + +from .models import Order, Package + + +class SubscriptionProjectSerializer(ModelSerializer): + purchased_time = SerializerMethodField(source="get_purchased_time") + spent_time = SerializerMethodField(source="get_spent_time") + + def get_purchased_time(self, obj): + """ + Calculate purchased time for given project. + + Only acknowledged hours are included. 
+ """ + orders = Order.objects.filter(project=obj, acknowledged=True) + data = orders.aggregate(purchased_time=Sum("duration")) + return duration_string(data["purchased_time"] or timedelta(0)) + + def get_spent_time(self, obj): + """ + Calculate spent time for given project. + + Reports which are not billable or are in review are excluded. + """ + reports = Report.objects.filter( + task__project=obj, not_billable=False, review=False + ) + data = reports.aggregate(spent_time=Sum("duration")) + return duration_string(data["spent_time"] or timedelta()) + + included_serializers = { + "billing_type": "timed.projects.serializers.BillingTypeSerializer", + "cost_center": "timed.projects.serializers.CostCenterSerializer", + "customer": "timed.projects.serializers.CustomerSerializer", + "orders": "timed.subscription.serializers.OrderSerializer", + } + + class Meta: + model = Project + resource_name = "subscription-projects" + fields = ( + "name", + "billing_type", + "cost_center", + "purchased_time", + "spent_time", + "customer", + "orders", + ) + + +class PackageSerializer(ModelSerializer): + price = CharField() + """CharField needed as it includes currency.""" + + included_serializers = { + "billing_type": "timed.projects.serializers.BillingTypeSerializer" + } + + class Meta: + model = Package + resource_name = "subscription-packages" + fields = ("duration", "price", "billing_type") + + +class OrderSerializer(ModelSerializer): + included_serializers = { + "project": ("timed.subscription.serializers" ".SubscriptionProjectSerializer") + } + + class Meta: + model = Order + resource_name = "subscription-orders" + fields = ("duration", "acknowledged", "ordered", "project") diff --git a/backend/timed/subscription/templates/notify_accountants_order.html b/backend/timed/subscription/templates/notify_accountants_order.html new file mode 100644 index 000000000..441b17b7b --- /dev/null +++ b/backend/timed/subscription/templates/notify_accountants_order.html @@ -0,0 +1,41 @@ +***EN***
+Charging {{hours_added}} hours + +
    +
  • Customer: {{customer.name}}
  • +
  • Project: {{project.name}}
  • +
  • Project total with charging: {{hours_total}} hours
  • +
+ +
+ __________________________________
+ Show in customer center
+ Show customer
+
+ + Customer Center
+ Credits / Reports +
+ +
+ __________________________________________________________________________

+ +***DE***
+ +Aufladung von {{hours_added}} Stunden + +
    +
  • Kunde: {{customer.name}}
  • +
  • Projekt: {{project.name}}
  • +
  • Projekt Total mit Aufladung: {{hours_total}} Stunden
  • +
+ +
+ __________________________________
+ Im Customer Center anzeigen
+ Kunde anzeigen
+
+ + Customer Center
+ Credits / Reports +
diff --git a/backend/timed/subscription/templates/notify_accountants_order.txt b/backend/timed/subscription/templates/notify_accountants_order.txt new file mode 100644 index 000000000..4f45eb0c8 --- /dev/null +++ b/backend/timed/subscription/templates/notify_accountants_order.txt @@ -0,0 +1,14 @@ +***EN*** + +Customer {{customer.name}} has ordered {{hours_added}} hours for {{project.name}}. +The new project total (if the order is accepted) would be {{hours_total}} hours. + +https://my.adfinis-sygroup.ch/timed-admin/confirm-subscriptions + + +***DE*** + +Kunde {{customer.name}} hat für {{project.name}} {{hours_added}} Stunden bestellt. +Das neue Projekt Total (falls die Bestellung akzeptiert wird) wäre {{hours_total}} Stunden. + +https://my.adfinis-sygroup.ch/timed-admin/confirm-subscriptions \ No newline at end of file diff --git a/backend/timed/subscription/tests/__init__.py b/backend/timed/subscription/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/timed/subscription/tests/test_order.py b/backend/timed/subscription/tests/test_order.py new file mode 100644 index 000000000..95b5c5f71 --- /dev/null +++ b/backend/timed/subscription/tests/test_order.py @@ -0,0 +1,307 @@ +from datetime import timedelta + +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.notifications.models import Notification +from timed.projects.factories import CustomerAssigneeFactory, ProjectFactory +from timed.subscription import factories + + +@pytest.mark.parametrize( + "is_customer, is_accountant, is_superuser", + [ + (True, False, False), + (False, True, False), + (False, False, True), + (False, False, False), + (False, False, False), + ], +) +def test_order_list(auth_client, is_customer, is_accountant, is_superuser): + """Test which user can see orders.""" + order = factories.OrderFactory.create() + user = auth_client.user + + if is_customer: + CustomerAssigneeFactory.create( + 
customer=order.project.customer, user=user, is_customer=True + ) + elif is_accountant: + user.is_accountant = True + user.save() + elif is_superuser: + user.is_superuser = True + user.save() + + url = reverse("subscription-order-list") + + res = auth_client.get(url) + assert res.status_code == status.HTTP_200_OK + + json = res.json() + assert len(json["data"]) == 1 + assert json["data"][0]["relationships"]["project"]["data"]["type"] == ( + "subscription-projects" + ) + + +@pytest.mark.parametrize( + "is_customer, is_accountant, is_superuser, confirmed, expected", + [ + (True, False, False, True, status.HTTP_403_FORBIDDEN), + (True, False, False, False, status.HTTP_403_FORBIDDEN), + (False, True, False, True, status.HTTP_403_FORBIDDEN), + (False, True, False, False, status.HTTP_204_NO_CONTENT), + (False, False, True, True, status.HTTP_403_FORBIDDEN), + (False, False, True, False, status.HTTP_204_NO_CONTENT), + (False, False, False, True, status.HTTP_403_FORBIDDEN), + (False, False, False, False, status.HTTP_403_FORBIDDEN), + ], +) +def test_order_delete( + auth_client, is_customer, is_accountant, is_superuser, confirmed, expected +): + """Test which user can delete orders, confirmed or not.""" + order = factories.OrderFactory() + if confirmed: + order.acknowledged = True + order.save() + + user = auth_client.user + + if is_customer: + CustomerAssigneeFactory.create( + customer=order.project.customer, user=user, is_customer=True + ) + elif is_accountant: + user.is_accountant = True + user.save() + elif is_superuser: + user.is_superuser = True + user.save() + + url = reverse("subscription-order-detail", args=[order.id]) + + res = auth_client.delete(url) + assert res.status_code == expected + + +@pytest.mark.parametrize( + "is_superuser, is_accountant, is_customer, status_code", + [ + (True, False, False, status.HTTP_204_NO_CONTENT), + (False, True, False, status.HTTP_204_NO_CONTENT), + (False, False, True, status.HTTP_403_FORBIDDEN), + (False, False, False, 
status.HTTP_403_FORBIDDEN), + ], +) +def test_order_confirm( + auth_client, is_superuser, is_accountant, is_customer, status_code +): + """Test which user may confirm orders.""" + order = factories.OrderFactory.create() + user = auth_client.user + + if is_superuser: + user.is_superuser = True + user.save() + elif is_accountant: + user.is_accountant = True + user.save() + elif is_customer: + CustomerAssigneeFactory.create( + user=user, is_customer=True, customer=order.project.customer + ) + + url = reverse("subscription-order-confirm", args=[order.id]) + + res = auth_client.post(url) + assert res.status_code == status_code + + if status_code == status.HTTP_204_NO_CONTENT: + order.refresh_from_db() + assert order.acknowledged + assert order.confirmedby == auth_client.user + + +@pytest.mark.parametrize( + "is_customer, is_accountant, is_superuser, acknowledged, mail_sent, project_estimate, expected", + [ + ( + True, + False, + False, + True, + 0, + timedelta(minutes=1), + status.HTTP_400_BAD_REQUEST, + ), + (True, False, False, False, 1, timedelta(hours=1), status.HTTP_201_CREATED), + (False, True, False, True, 0, timedelta(hours=10), status.HTTP_201_CREATED), + (False, True, False, False, 0, timedelta(hours=24), status.HTTP_201_CREATED), + (False, False, True, True, 0, timedelta(hours=50), status.HTTP_201_CREATED), + (False, False, True, False, 0, timedelta(hours=100), status.HTTP_201_CREATED), + (False, False, False, True, 0, timedelta(hours=200), status.HTTP_403_FORBIDDEN), + (False, False, False, False, 0, None, status.HTTP_403_FORBIDDEN), + ], +) +def test_order_create( + auth_client, + mailoutbox, + is_customer, + is_accountant, + is_superuser, + acknowledged, + mail_sent, + project_estimate, + expected, +): + """Test which user may create orders. + + Additionally test if for creation of acknowledged/confirmed orders. 
+ """ + user = auth_client.user + project = ProjectFactory.create(estimated_time=project_estimate) + + if is_customer: + CustomerAssigneeFactory.create( + user=user, is_customer=True, customer=project.customer + ) + elif is_accountant: + user.is_accountant = True + user.save() + elif is_superuser: + user.is_superuser = True + user.save() + + data = { + "data": { + "type": "subscription-orders", + "id": None, + "attributes": { + "acknowledged": acknowledged, + "duration": "00:30:00", + }, + "relationships": { + "project": { + "data": {"type": "subscription-projects", "id": project.id} + }, + }, + } + } + + url = reverse("subscription-order-list") + + response = auth_client.post(url, data) + assert response.status_code == expected + + assert len(mailoutbox) == mail_sent + if mail_sent: + mail = mailoutbox[0] + url = f"https://my.adfinis-sygroup.ch/timed-admin/{project.id}" + assert str(project.customer) in mail.body + assert str(project.name) in mail.body + assert "0:30:00" in mail.body + assert url in mail.alternatives[0][0] + assert Notification.objects.count() == 1 + + +@pytest.mark.parametrize( + "duration, expected, status_code", + [ + ("00:30:00", "0:30:00", status.HTTP_201_CREATED), + ("30:00:00", "1 day, 6:00:00", status.HTTP_201_CREATED), + ("30:30:00", "1 day, 6:30:00", status.HTTP_201_CREATED), + ("-00:30:00", "-0:30:00", status.HTTP_400_BAD_REQUEST), + ("-30:00:00", "-1 day, 6:00:00", status.HTTP_400_BAD_REQUEST), + ("-30:30:00", "-1 day, 6:30:00", status.HTTP_400_BAD_REQUEST), + ], +) +def test_order_create_duration( + auth_client, mailoutbox, duration, expected, status_code +): + user = auth_client.user + project = ProjectFactory.create(estimated_time=timedelta(hours=1)) + CustomerAssigneeFactory.create( + user=user, is_customer=True, customer=project.customer + ) + + data = { + "data": { + "type": "subscription-orders", + "id": None, + "attributes": { + "acknowledged": False, + "duration": duration, + }, + "relationships": { + "project": { + "data": 
{"type": "subscription-projects", "id": project.id} + }, + }, + } + } + + url = reverse("subscription-order-list") + + response = auth_client.post(url, data) + assert response.status_code == status_code + + if status_code == status.HTTP_201_CREATED: + assert len(mailoutbox) == 1 + + mail = mailoutbox[0] + assert expected in mail.body + + +@pytest.mark.parametrize( + "is_customer, is_accountant, is_superuser, acknowledged, expected", + [ + (True, False, False, True, status.HTTP_403_FORBIDDEN), + (True, False, False, False, status.HTTP_403_FORBIDDEN), + (False, True, False, True, status.HTTP_200_OK), + (False, True, False, False, status.HTTP_200_OK), + (False, False, True, True, status.HTTP_200_OK), + (False, False, True, False, status.HTTP_200_OK), + (False, False, False, True, status.HTTP_403_FORBIDDEN), + (False, False, False, False, status.HTTP_403_FORBIDDEN), + ], +) +def test_order_update( + auth_client, is_customer, is_accountant, is_superuser, acknowledged, expected +): + user = auth_client.user + order = factories.OrderFactory.create() + + if acknowledged: + order.acknowledged = True + order.save() + + if is_customer: + CustomerAssigneeFactory.create( + user=user, is_customer=True, customer=order.project.customer + ) + elif is_accountant: + user.is_accountant = True + user.save() + elif is_superuser: + user.is_superuser = True + user.save() + + data = { + "data": { + "type": "subscription-orders", + "id": order.id, + "attributes": { + "duration": "50:00:00", + "acknowledged": True, + }, + } + } + + url = reverse("subscription-order-detail", args=[order.id]) + + response = auth_client.patch(url, data) + assert response.status_code == expected diff --git a/backend/timed/subscription/tests/test_package.py b/backend/timed/subscription/tests/test_package.py new file mode 100644 index 000000000..60f5ccba7 --- /dev/null +++ b/backend/timed/subscription/tests/test_package.py @@ -0,0 +1,33 @@ +from django.urls import reverse +from rest_framework.status import 
HTTP_200_OK + +from timed.projects.factories import BillingTypeFactory, CustomerFactory, ProjectFactory +from timed.subscription.factories import PackageFactory + + +def test_subscription_package_list(auth_client): + PackageFactory.create() + + url = reverse("subscription-package-list") + + res = auth_client.get(url) + assert res.status_code == HTTP_200_OK + + json = res.json() + assert len(json["data"]) == 1 + + +def test_subscription_package_filter_customer(auth_client): + customer = CustomerFactory.create() + billing_type = BillingTypeFactory.create() + package = PackageFactory.create(billing_type=billing_type) + ProjectFactory.create_batch(2, billing_type=billing_type, customer=customer) + + url = reverse("subscription-package-list") + + res = auth_client.get(url, data={"customer": customer.id}) + assert res.status_code == HTTP_200_OK + + json = res.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(package.id) diff --git a/backend/timed/subscription/tests/test_subscription_project.py b/backend/timed/subscription/tests/test_subscription_project.py new file mode 100644 index 000000000..39336eb64 --- /dev/null +++ b/backend/timed/subscription/tests/test_subscription_project.py @@ -0,0 +1,122 @@ +from datetime import timedelta + +import pytest +from django.urls import reverse +from rest_framework.status import HTTP_200_OK, HTTP_404_NOT_FOUND + +from timed.employment.factories import EmploymentFactory +from timed.projects.factories import ( + BillingTypeFactory, + CustomerAssigneeFactory, + CustomerFactory, + ProjectFactory, + TaskFactory, +) +from timed.subscription.factories import OrderFactory, PackageFactory +from timed.tracking.factories import ReportFactory + + +@pytest.mark.parametrize("is_external, expected", [(True, 0), (False, 1)]) +def test_subscription_project_list(auth_client, is_external, expected): + employment = EmploymentFactory.create(user=auth_client.user, is_external=False) + if is_external: + employment.is_external = 
True + employment.save() + customer = CustomerFactory.create() + billing_type = BillingTypeFactory() + project = ProjectFactory.create( + billing_type=billing_type, customer=customer, customer_visible=True + ) + PackageFactory.create_batch(2, billing_type=billing_type) + # create spent hours + task = TaskFactory.create(project=project) + TaskFactory.create(project=project) + ReportFactory.create(task=task, duration=timedelta(hours=2)) + ReportFactory.create(task=task, duration=timedelta(hours=3)) + # not billable reports should not be included in spent hours + ReportFactory.create(not_billable=True, task=task, duration=timedelta(hours=4)) + # project of same customer but without customer_visible set + # should not appear + ProjectFactory.create(customer=customer) + + # create purchased time + OrderFactory.create(project=project, acknowledged=True, duration=timedelta(hours=2)) + OrderFactory.create(project=project, acknowledged=True, duration=timedelta(hours=4)) + + # report on different project should not be included in spent time + ReportFactory.create(duration=timedelta(hours=2)) + # not acknowledged order should not be included in purchased time + OrderFactory.create(project=project, duration=timedelta(hours=2)) + + url = reverse("subscription-project-list") + + res = auth_client.get(url, data={"customer": customer.id, "ordering": "id"}) + assert res.status_code == HTTP_200_OK + + json = res.json() + assert len(json["data"]) == expected + if expected: + assert json["data"][0]["id"] == str(project.id) + + attrs = json["data"][0]["attributes"] + assert attrs["spent-time"] == "05:00:00" + assert attrs["purchased-time"] == "06:00:00" + + +@pytest.mark.parametrize( + "is_customer, project_of_customer, has_employment, is_external, expected", + [ + (True, True, False, False, HTTP_200_OK), + (True, False, False, False, HTTP_404_NOT_FOUND), + (False, False, True, False, HTTP_200_OK), + (False, False, True, True, HTTP_404_NOT_FOUND), + ], +) +def 
test_subscription_project_detail( + auth_client, is_customer, project_of_customer, has_employment, is_external, expected +): + user = auth_client.user + billing_type = BillingTypeFactory() + project = ProjectFactory.create(billing_type=billing_type, customer_visible=True) + PackageFactory.create_batch(2, billing_type=billing_type) + + if has_employment: + employment = EmploymentFactory.create(user=user, is_external=False) + if is_external: + employment.is_external = True + employment.save() + + if is_customer: + customer_assignee = CustomerAssigneeFactory(user=user, is_customer=True) + if project_of_customer: + customer_assignee.customer = project.customer + customer_assignee.save() + + url = reverse("subscription-project-detail", args=[project.id]) + res = auth_client.get(url) + assert res.status_code == expected + + if expected == HTTP_200_OK: + json = res.json() + assert json["data"]["id"] == str(project.id) + + +def test_subscription_project_list_user_is_customer(auth_client): + customer = CustomerFactory.create() + project = ProjectFactory.create(customer=customer, customer_visible=True) + ProjectFactory.create_batch(4, customer_visible=True) + + user = auth_client.user + CustomerAssigneeFactory.create(user=user, customer=customer, is_customer=True) + + url = reverse("subscription-project-list") + + response = auth_client.get(url) + assert response.status_code == HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(project.id) + assert json["data"][0]["relationships"]["customer"]["data"]["id"] == str( + customer.id + ) diff --git a/backend/timed/subscription/urls.py b/backend/timed/subscription/urls.py new file mode 100644 index 000000000..c721b0713 --- /dev/null +++ b/backend/timed/subscription/urls.py @@ -0,0 +1,14 @@ +from django.conf import settings +from rest_framework.routers import DefaultRouter + +from . 
import views + +r = DefaultRouter(trailing_slash=settings.APPEND_SLASH) + +r.register( + r"subscription-projects", views.SubscriptionProjectViewSet, "subscription-project" +) +r.register(r"subscription-packages", views.PackageViewSet, "subscription-package") +r.register(r"subscription-orders", views.OrderViewSet, "subscription-order") + +urlpatterns = r.urls diff --git a/backend/timed/subscription/views.py b/backend/timed/subscription/views.py new file mode 100644 index 000000000..33e51f4cd --- /dev/null +++ b/backend/timed/subscription/views.py @@ -0,0 +1,121 @@ +from django.db.models import Q +from rest_framework import decorators, exceptions, response, status, viewsets +from rest_framework_json_api.serializers import ValidationError + +from timed.notifications import notify_admin +from timed.permissions import ( + IsAccountant, + IsAuthenticated, + IsCreateOnly, + IsCustomer, + IsReadOnly, + IsSuperUser, +) +from timed.projects.filters import ProjectFilterSet +from timed.projects.models import CustomerAssignee, Project + +from . import filters, models, serializers + + +class SubscriptionProjectViewSet(viewsets.ReadOnlyModelViewSet): + """ + Subscription specific project view. + + Subscription projects are not archived projects + which have a billing type with packages. 
+ """ + + serializer_class = serializers.SubscriptionProjectSerializer + filterset_class = ProjectFilterSet + ordering_fields = ("name", "id") + + def get_queryset(self): + user = self.request.user + queryset = Project.objects.filter(archived=False, customer_visible=True) + current_employment = user.get_active_employment() + + if current_employment is None or current_employment.is_external: + if CustomerAssignee.objects.filter(user=user, is_customer=True).exists(): + return queryset.filter( + Q( + customer__customer_assignees__user=user, + customer__customer_assignees__is_customer=True, + ) + ) + elif not current_employment.is_external: + return queryset + return queryset.none() + + +class PackageViewSet(viewsets.ReadOnlyModelViewSet): + serializer_class = serializers.PackageSerializer + filterset_class = filters.PackageFilter + + def get_queryset(self): + return models.Package.objects.select_related("billing_type") + + +class OrderViewSet(viewsets.ModelViewSet): + serializer_class = serializers.OrderSerializer + filterset_class = filters.OrderFilter + permission_classes = [ + # superuser and accountants may edit all orders + (IsSuperUser | IsAccountant) + # customers may only create orders + | IsCustomer & IsCreateOnly + # all authenticated users may read all orders + | IsAuthenticated & IsReadOnly + ] + + def create(self, request, *args, **kwargs): + """Override so we can issue emails on creation.""" + # check if order is acknowledged and created by admin/accountant + if ( + request.method == "POST" + and request.data.get("acknowledged") + and not (request.user.is_accountant or request.user.is_superuser) + ): + raise ValidationError("User can not create confirmed orders!") + + project = Project.objects.get(id=request.data.get("project")["id"]) + order_duration = request.data.get("duration") + + # only send notification emails if order was created by a customer + # don't allow customers to create orders with negative duration + if not (request.user.is_accountant 
or request.user.is_superuser): + if "-" in request.data.get("duration"): + raise ValidationError( + "Customer can not create orders with negative duration!" + ) + notify_admin.prepare_and_send_email(project, order_duration) + return super().create(request, *args, **kwargs) + + @decorators.action( + detail=True, + methods=["post"], + permission_classes=[IsSuperUser | IsAccountant], + ) + def confirm(self, request, pk=None): + """ + Confirm order. + + Only allowed by staff members + """ + order = self.get_object() + order.acknowledged = True + order.confirmedby = request.user + order.save() + + return response.Response(status=status.HTTP_204_NO_CONTENT) + + def get_queryset(self): + return models.Order.objects.select_related("project") + + def destroy(self, request, pk): + instance = self.get_object() + if instance.acknowledged: + # acknowledge orders may not be deleted + raise exceptions.PermissionDenied() + + instance.delete() + return response.Response(status=status.HTTP_204_NO_CONTENT) diff --git a/backend/timed/templates/login.html b/backend/timed/templates/login.html new file mode 100644 index 000000000..7f3ec2605 --- /dev/null +++ b/backend/timed/templates/login.html @@ -0,0 +1,68 @@ +{% extends "admin/login.html" %} + +{% load i18n %} + +{% block content %} + +{% if form.errors and not form.non_field_errors %} +

+{% if form.errors.items|length == 1 %}{% trans "Please correct the error below." %}{% else %}{% trans "Please correct the errors below." %}{% endif %} +

+{% endif %} + +{% if form.non_field_errors %} +{% for error in form.non_field_errors %} +

+ {{ error }} +

+{% endfor %} +{% endif %} + +
+ +{% if user.is_authenticated %} +

+{% blocktrans trimmed %} + You are authenticated as {{ username }}, but are not authorized to + access this page. Would you like to login to a different account? +{% endblocktrans %} +

+{% endif %} + +{% if show_local_login %} +
{% csrf_token %} +
+ {{ form.username.errors }} + {{ form.username.label_tag }} {{ form.username }} +
+
+ {{ form.password.errors }} + {{ form.password.label_tag }} {{ form.password }} + +
+ {% url 'admin_password_reset' as password_reset_url %} + {% if password_reset_url %} + + {% endif %} +
+ +
+
+
+{% endif %} + +{% if user.is_authenticated %} +

{% trans 'Current user:' %} {{ user.email }}

+
+ +
+{% else %} +
+ +
+{% endif %} + +
+{% endblock %} diff --git a/backend/timed/tests/__init__.py b/backend/timed/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/timed/tests/test_authentication.py b/backend/timed/tests/test_authentication.py new file mode 100644 index 000000000..d18c8ca93 --- /dev/null +++ b/backend/timed/tests/test_authentication.py @@ -0,0 +1,166 @@ +import hashlib +import json + +import pytest +from django.contrib.auth import get_user_model +from django.core.cache import cache +from mozilla_django_oidc.contrib.drf import OIDCAuthentication +from requests.exceptions import HTTPError +from rest_framework import exceptions, status +from rest_framework.exceptions import AuthenticationFailed +from rest_framework.reverse import reverse + +from timed.employment.factories import UserFactory + + +@pytest.mark.parametrize("is_id_token", [True, False]) +@pytest.mark.parametrize( + "authentication_header,authenticated,error", + [ + ("", False, False), + ("Bearer", False, True), + ("Bearer Too many params", False, True), + ("Basic Auth", False, True), + ("Bearer Token", True, False), + ], +) +@pytest.mark.parametrize("user__username", ["1"]) +def test_authentication( + db, + user, + rf, + authentication_header, + authenticated, + error, + is_id_token, + requests_mock, + settings, +): + userinfo = {"sub": "1"} + requests_mock.get(settings.OIDC_OP_USER_ENDPOINT, text=json.dumps(userinfo)) + + if not is_id_token: + userinfo = {"client_id": "test_client", "sub": "1"} + requests_mock.get( + settings.OIDC_OP_USER_ENDPOINT, status_code=status.HTTP_401_UNAUTHORIZED + ) + requests_mock.post( + settings.OIDC_OP_INTROSPECT_ENDPOINT, text=json.dumps(userinfo) + ) + + request = rf.get("/openid", HTTP_AUTHORIZATION=authentication_header) + try: + result = OIDCAuthentication().authenticate(request) + except exceptions.AuthenticationFailed: + assert error + else: + if result: + key = "userinfo" if is_id_token else "introspection" + user, auth = result + assert 
user.is_authenticated + assert ( + cache.get(f"auth.{key}.{hashlib.sha256(b'Token').hexdigest()}") + == userinfo + ) + + +@pytest.mark.parametrize( + "create_user,username,expected_count", + [(False, "", 0), (True, "", 1), (True, "foo@example.com", 1)], +) +def test_authentication_new_user( + db, rf, requests_mock, settings, create_user, username, expected_count +): + settings.OIDC_CREATE_USER = create_user + user_model = get_user_model() + assert user_model.objects.filter(username=username).count() == 0 + + userinfo = {"sub": username} + requests_mock.get(settings.OIDC_OP_USER_ENDPOINT, text=json.dumps(userinfo)) + + request = rf.get("/openid", HTTP_AUTHORIZATION="Bearer Token") + + try: + user, _ = OIDCAuthentication().authenticate(request) + except AuthenticationFailed: + assert not create_user + else: + assert user.username == username + + assert user_model.objects.count() == expected_count + + +def test_authentication_update_user_data(db, rf, requests_mock, settings): + user_model = get_user_model() + user = UserFactory.create() + + userinfo = { + "sub": user.username, + "email": "test@localhost", + "given_name": "Max", + "family_name": "Mustermann", + } + + requests_mock.get(settings.OIDC_OP_USER_ENDPOINT, text=json.dumps(userinfo)) + + request = rf.get("/openid", HTTP_AUTHORIZATION="Bearer Token") + + user, _ = OIDCAuthentication().authenticate(request) + + assert user_model.objects.count() == 1 + assert user.first_name == "Max" + assert user.last_name == "Mustermann" + assert user.email == "test@localhost" + + +def test_authentication_idp_502(db, rf, requests_mock, settings): + requests_mock.get( + settings.OIDC_OP_USER_ENDPOINT, status_code=status.HTTP_502_BAD_GATEWAY + ) + + request = rf.get("/openid", HTTP_AUTHORIZATION="Bearer Token") + with pytest.raises(HTTPError): + OIDCAuthentication().authenticate(request) + + +def test_authentication_idp_missing_claim(db, rf, requests_mock, settings): + settings.OIDC_USERNAME_CLAIM = "missing" + userinfo = 
{"preferred_username": "1"} + requests_mock.get(settings.OIDC_OP_USER_ENDPOINT, text=json.dumps(userinfo)) + + request = rf.get("/openid", HTTP_AUTHORIZATION="Bearer Token") + with pytest.raises(AuthenticationFailed): + OIDCAuthentication().authenticate(request) + + +def test_authentication_no_client(db, rf, requests_mock, settings): + requests_mock.get( + settings.OIDC_OP_USER_ENDPOINT, status_code=status.HTTP_401_UNAUTHORIZED + ) + requests_mock.post( + settings.OIDC_OP_INTROSPECT_ENDPOINT, + text=json.dumps({"preferred_username": "1"}), + ) + + request = rf.get("/openid", HTTP_AUTHORIZATION="Bearer Token") + with pytest.raises(AuthenticationFailed): + OIDCAuthentication().authenticate(request) + + +@pytest.mark.parametrize("check_introspect", [True, False]) +def test_userinfo_introspection_failure( + db, client, rf, requests_mock, settings, check_introspect +): + settings.OIDC_CHECK_INTROSPECT = check_introspect + requests_mock.get( + settings.OIDC_OP_USER_ENDPOINT, status_code=status.HTTP_401_UNAUTHORIZED + ) + requests_mock.post( + settings.OIDC_OP_INTROSPECT_ENDPOINT, status_code=status.HTTP_403_FORBIDDEN + ) + resp = client.get(reverse("user-me"), HTTP_AUTHORIZATION="Bearer Token") + assert resp.status_code == status.HTTP_401_UNAUTHORIZED + request = rf.get("/openid", HTTP_AUTHORIZATION="Bearer Token") + with pytest.raises(AuthenticationFailed): + OIDCAuthentication().authenticate(request) + cache.clear() diff --git a/backend/timed/tests/test_settings.py b/backend/timed/tests/test_settings.py new file mode 100644 index 000000000..33149b283 --- /dev/null +++ b/backend/timed/tests/test_settings.py @@ -0,0 +1,15 @@ +import environ +import pytest + +from timed import settings + + +def test_admins(): + assert settings.parse_admins(["Test Example "]) == [ + ("Test Example", "test@example.com") + ] + + +def test_invalid_admins(monkeypatch): + with pytest.raises(environ.ImproperlyConfigured): + settings.parse_admins(["Test Example = employment.worktime_per_day: + # 
prevent negative duration in case user already + # reported more time than worktime per day + return timedelta() + + return employment.worktime_per_day - reported_time + + class Meta: + """Meta informations for the absence model.""" + + unique_together = ("date", "user") diff --git a/backend/timed/tracking/serializers.py b/backend/timed/tracking/serializers.py new file mode 100644 index 000000000..65ff17e32 --- /dev/null +++ b/backend/timed/tracking/serializers.py @@ -0,0 +1,454 @@ +"""Serializers for the tracking app.""" +from datetime import date, timedelta + +from django.contrib.auth import get_user_model +from django.db.models import BooleanField, Case, Q, When +from django.utils.duration import duration_string +from django.utils.translation import gettext_lazy as _ +from rest_framework_json_api import relations, serializers +from rest_framework_json_api.relations import ResourceRelatedField +from rest_framework_json_api.serializers import ( + ModelSerializer, + Serializer, + SerializerMethodField, + ValidationError, +) + +from timed.employment.models import AbsenceType, Employment, PublicHoliday, User +from timed.employment.relations import CurrentUserResourceRelatedField +from timed.projects.models import Customer, Project, Task +from timed.serializers import TotalTimeRootMetaMixin +from timed.tracking import models + + +class ActivitySerializer(ModelSerializer): + """Activity serializer.""" + + user = CurrentUserResourceRelatedField() + + included_serializers = { + "task": "timed.projects.serializers.TaskSerializer", + "user": "timed.employment.serializers.UserSerializer", + } + + def validate(self, data): + """Validate the activity block. + + Ensure that a user can only have one activity + which doesn't end before it started. 
+ """ + instance = self.instance + from_time = data.get("from_time", instance and instance.from_time) + to_time = data.get("to_time", instance and instance.to_time) + user = instance and instance.user or data["user"] + + def validate_running_activity(): + if activity.filter(to_time__isnull=True).exists(): + raise ValidationError(_("A user can only have one active activity")) + + # validate that there is only one active activity + activity = models.Activity.objects.filter(user=user) + # if the request creates a new activity + if instance is None and to_time is None: + validate_running_activity() + # if the request mutates an existsting activity + if instance and instance.to_time and to_time is None: + validate_running_activity() + + # validate that to is not before from + if to_time is not None and to_time < from_time: + raise ValidationError(_("An activity block may not end before it starts.")) + + return data + + class Meta: + """Meta information for the activity serializer.""" + + model = models.Activity + fields = "__all__" + + +class AttendanceSerializer(ModelSerializer): + """Attendance serializer.""" + + user = CurrentUserResourceRelatedField() + + included_serializers = {"user": "timed.employment.serializers.UserSerializer"} + + class Meta: + """Meta information for the attendance serializer.""" + + model = models.Attendance + fields = ["date", "from_time", "to_time", "user"] + + +class ReportSerializer(TotalTimeRootMetaMixin, ModelSerializer): + """Report serializer.""" + + task = ResourceRelatedField(queryset=Task.objects.all()) + activity = ResourceRelatedField( + queryset=models.Activity.objects.all(), allow_null=True, required=False + ) + user = CurrentUserResourceRelatedField() + verified_by = ResourceRelatedField( + queryset=get_user_model().objects, required=False, allow_null=True + ) + + included_serializers = { + "task": "timed.projects.serializers.TaskSerializer", + "user": "timed.employment.serializers.UserSerializer", + "verified_by": 
"timed.employment.serializers.UserSerializer", + } + + def _validate_owner_only(self, value, field): + if self.instance is not None: + user = self.context["request"].user + owner = self.instance.user + if getattr(self.instance, field) != value and user != owner: + raise ValidationError(_(f"Only owner may change {field}")) + + return value + + def validate_date(self, value): + """Only owner is allowed to change date.""" + return self._validate_owner_only(value, "date") + + def validate_duration(self, value): + """Only owner is allowed to change duration.""" + return self._validate_owner_only(value, "duration") + + def validate_billed(self, value): + """Only accountants may bill reports.""" + if self.instance is not None: + if not self.context["request"].user.is_accountant and ( + self.instance.billed != value + ): + raise ValidationError(_("Only accountants may bill reports.")) + + return value + + def validate_rejected(self, value): + """Only reviewers are allowed to change rejected field.""" + if self.instance is not None: + user = self.context["request"].user + if ( + not user.is_reviewer or self.instance.user == user + ) and self.instance.rejected != value: + raise ValidationError(_("Only reviewers may reject reports.")) + + return value + + def validate(self, data): + """ + Validate that verified by is only set by reviewer or superuser. + + Additionally make sure a report is cannot be verified_by if is still + needs review. + + External employees with manager or reviewer role may not create reports. + + Check if remaing effort tracking is active on the corresponding project. 
+ """ + + user = self.context["request"].user + current_verified_by = self.instance and self.instance.verified_by + new_verified_by = data.get("verified_by") + task = data.get("task") or self.instance.task + review = data.get("review") + billed = data.get("billed") + is_reviewer = ( + user.is_superuser + or Task.objects.filter( + Q( + task_assignees__user=user, + task_assignees__is_reviewer=True, + task_assignees__task=task, + ) + | Q( + project__project_assignees__user=user, + project__project_assignees__is_reviewer=True, + project__project_assignees__project=task.project, + ) + | Q( + project__customer__customer_assignees__user=user, + project__customer__customer_assignees__is_reviewer=True, + project__customer__customer_assignees__customer=task.project.customer, + ) + ).exists() + ) + + # check if remaining effort tracking is active on the corresponding project + if not task.project.remaining_effort_tracking and data.get("remaining_effort"): + raise ValidationError( + "Remaining effort tracking is not active on this project!" 
+ ) + + if new_verified_by != current_verified_by: + if not is_reviewer: + raise ValidationError(_("Only reviewer may verify reports.")) + + if new_verified_by is not None and new_verified_by != user: + raise ValidationError(_("You may only verifiy with your own user")) + + if new_verified_by and review: # pragma: no cover + raise ValidationError( + _("Report can't both be set as `review` and `verified`.") + ) + + # update billed flag on created reports + if not self.instance or billed is None: + data["billed"] = task.project.billed + + # update billed flag on reports that are being moved to a different project + # according to the billed flag of the project the report was moved to + if self.instance and data.get("task"): + if self.instance.task.id != data.get("task").id: + data["billed"] = data.get("task").project.billed + + current_employment = Employment.objects.get_at(user=user, date=date.today()) + + if ( + self.context["request"].method == "POST" + and current_employment.is_external + and Task.objects.filter( + Q( + task_assignees__user=user, + task_assignees__is_reviewer=True, + ) + | Q( + project__project_assignees__user=user, + project__project_assignees__is_reviewer=True, + ) + | Q( + project__customer__customer_assignees__user=user, + project__customer__customer_assignees__is_reviewer=True, + ) + | Q( + task_assignees__user=user, + task_assignees__is_manager=True, + ) + | Q( + project__project_assignees__user=user, + project__project_assignees__is_manager=True, + ) + | Q( + project__customer__customer_assignees__user=user, + project__customer__customer_assignees__is_manager=True, + ) + ).exists() + ): + raise ValidationError( + "User is not a resource on the corresponding task, project or customer" + ) + return data + + class Meta: + model = models.Report + fields = [ + "comment", + "date", + "duration", + "review", + "not_billable", + "billed", + "task", + "activity", + "user", + "verified_by", + "rejected", + "remaining_effort", + ] + + +class 
ReportBulkSerializer(Serializer): + """Serializer used for bulk updates of reports.""" + + task = ResourceRelatedField( + queryset=Task.objects.all(), allow_null=True, required=False + ) + comment = serializers.CharField(allow_null=True, required=False) + review = serializers.BooleanField(required=False, allow_null=True) + not_billable = serializers.BooleanField(required=False, allow_null=True) + billed = serializers.BooleanField(required=False, allow_null=True) + verified = serializers.BooleanField(required=False, allow_null=True) + rejected = serializers.BooleanField(required=False) + + class Meta: + resource_name = "report-bulks" + + +class ReportIntersectionSerializer(Serializer): + """ + Serializer of report intersections. + + Serializes a representation of all fields which are the same + in given Report objects. If values of one field are not the same + in all objects it will be represented as None. + + Serializer expect instance to have a queryset value. + """ + + customer = relations.SerializerMethodResourceRelatedField( + method_name="get_customer", model=Customer, read_only=True + ) + project = relations.SerializerMethodResourceRelatedField( + method_name="get_project", model=Project, read_only=True + ) + task = relations.SerializerMethodResourceRelatedField( + method_name="get_task", model=Task, read_only=True + ) + user = relations.SerializerMethodResourceRelatedField( + method_name="get_user", model=User, read_only=True + ) + comment = SerializerMethodField() + review = SerializerMethodField() + not_billable = SerializerMethodField() + billed = SerializerMethodField() + verified = SerializerMethodField() + rejected = SerializerMethodField() + + def _intersection(self, instance, field, model=None): + """Get intersection of given field. 
+ + :return: Returns value of field if objects have same value; + otherwise None + """ + value = None + queryset = instance["queryset"] + values = queryset.values(field).distinct() + if values.count() == 1: + value = values.first()[field] + if model: + value = model.objects.get(pk=value) + + return value + + def get_customer(self, instance): + return self._intersection(instance, "task__project__customer", Customer) + + def get_project(self, instance): + return self._intersection(instance, "task__project", Project) + + def get_task(self, instance): + return self._intersection(instance, "task", Task) + + def get_user(self, instance): + return self._intersection(instance, "user", User) + + def get_comment(self, instance): + return self._intersection(instance, "comment") + + def get_review(self, instance): + return self._intersection(instance, "review") + + def get_not_billable(self, instance): + return self._intersection(instance, "not_billable") + + def get_billed(self, instance): + return self._intersection(instance, "billed") + + def get_verified(self, instance): + queryset = instance["queryset"] + queryset = queryset.annotate( + verified=Case( + When(verified_by_id__isnull=True, then=False), + default=True, + output_field=BooleanField(), + ) + ) + instance["queryset"] = queryset + return self._intersection(instance, "verified") + + def get_rejected(self, instance): + return self._intersection(instance, "rejected") + + def get_root_meta(self, resource, many): + """Add number of results to meta.""" + queryset = self.instance["queryset"] + return {"count": queryset.count()} + + included_serializers = { + "customer": "timed.projects.serializers.CustomerSerializer", + "project": "timed.projects.serializers.ProjectSerializer", + "task": "timed.projects.serializers.TaskSerializer", + "user": "timed.employment.serializers.UserSerializer", + } + + class Meta: + resource_name = "report-intersections" + + +class AbsenceSerializer(ModelSerializer): + """Absence serializer.""" 
+ + duration = SerializerMethodField(source="get_duration") + absence_type = ResourceRelatedField(queryset=AbsenceType.objects.all()) + user = CurrentUserResourceRelatedField() + + included_serializers = { + "user": "timed.employment.serializers.UserSerializer", + "absence_type": "timed.employment.serializers.AbsenceTypeSerializer", + } + + def get_duration(self, instance): + try: + employment = Employment.objects.get_at(instance.user, instance.date) + except Employment.DoesNotExist: + # absence is invalid if no employment exists on absence date + return duration_string(timedelta()) + + return duration_string(instance.calculate_duration(employment)) + + def validate_date(self, value): + """Only owner is allowed to change date.""" + if self.instance is not None: + user = self.context["request"].user + owner = self.instance.user + if self.instance.date != value and user != owner: + raise ValidationError(_("Only owner may change date")) + + return value + + def validate_absence_type(self, value): + """Only owner is allowed to change type.""" + if self.instance is not None: + user = self.context["request"].user + owner = self.instance.user + if self.instance.date != value and user != owner: + raise ValidationError(_("Only owner may change absence type")) + + return value + + def validate(self, data): + """Validate the absence data. + + An absence should not be created on a public holiday or a weekend. 
+ + :returns: The validated data + :rtype: dict + """ + instance = self.instance + user = data.get("user", instance and instance.user) + try: + location = Employment.objects.get_at(user, data.get("date")).location + except Employment.DoesNotExist: # pragma: no cover + raise ValidationError( + _("You can't create an absence on an unemployed day.") + ) + + if PublicHoliday.objects.filter( + location_id=location.id, date=data.get("date") + ).exists(): + raise ValidationError(_("You can't create an absence on a public holiday")) + + workdays = [int(day) for day in location.workdays] + if data.get("date").isoweekday() not in workdays: + raise ValidationError(_("You can't create an absence on a weekend")) + + return data + + class Meta: + """Meta information for the absence serializer.""" + + model = models.Absence + fields = ["comment", "date", "duration", "absence_type", "user"] diff --git a/backend/timed/tracking/signals.py b/backend/timed/tracking/signals.py new file mode 100644 index 000000000..c08cede52 --- /dev/null +++ b/backend/timed/tracking/signals.py @@ -0,0 +1,55 @@ +from django.db.models import Sum +from django.db.models.signals import pre_save +from django.dispatch import receiver + +from timed.tracking.models import Report + + +@receiver(pre_save, sender=Report) +def update_rejected_on_reports(sender, instance, **kwargs): + """Unreject report when the task changes.""" + # Check if the report is being created or updated + if instance.pk and instance.rejected: + report = Report.objects.get(id=instance.id) + if report.task_id != instance.task_id: + instance.rejected = False + + +@receiver(pre_save, sender=Report) +def update_most_recent_remaining_effort(sender, instance, **kwargs): + """Update remaining effort on task, if remaining effort tracking is active. + + Update most_recent_remaining_effort on task and total_remaining_effort on project + only if remaining effort on report has changed. + Any other change on report should not trigger this signal. 
+ """ + if kwargs.get("raw", False): # pragma: no cover + return + + if instance.task.project.remaining_effort_tracking is not True: + return + + # update most_recent_remaining_effort and total_remaining_effort on report creation + if not instance.pk: + update_remaining_effort(instance) + return + + # check if remaining effort has changed on report update + if instance.remaining_effort != Report.objects.get(id=instance.id).remaining_effort: + update_remaining_effort(instance) + + +def update_remaining_effort(report): + task = report.task + project = task.project + + task.most_recent_remaining_effort = report.remaining_effort + task.save() + + total_remaining_effort = ( + task.project.tasks.all() + .aggregate(sum_remaining=Sum("most_recent_remaining_effort")) + .get("sum_remaining") + ) + project.total_remaining_effort = total_remaining_effort + project.save() diff --git a/backend/timed/tracking/tasks.py b/backend/timed/tracking/tasks.py new file mode 100644 index 000000000..7422defb7 --- /dev/null +++ b/backend/timed/tracking/tasks.py @@ -0,0 +1,110 @@ +from django.conf import settings +from django.core.mail import EmailMessage, get_connection +from django.template.loader import get_template + + +def _send_notification_emails(changes, reviewer, rejected=False): + """Send email for each user.""" + + if rejected: + subject = "[Timed] Your reports have been rejected" + template = get_template("mail/notify_user_rejected_reports.tmpl", using="text") + else: + template = get_template("mail/notify_user_changed_reports.tmpl", using="text") + subject = "[Timed] Your reports have been changed" + from_email = settings.DEFAULT_FROM_EMAIL + connection = get_connection() + + messages = [] + + for user_changes in changes: + user = user_changes["user"] + + body = template.render( + { + # we need start and end date in system format + "reviewer": reviewer, + "user_changes": user_changes["changes"], + } + ) + + message = EmailMessage( + subject=subject, + body=body, + 
from_email=from_email, + to=[user.email], + connection=connection, + headers=settings.EMAIL_EXTRA_HEADERS, + ) + + messages.append(message) + if len(messages) > 0: + connection.send_messages(messages) + + +def _get_report_changeset(report, fields): + changeset = { + "report": report, + "changes": { + key: {"old": getattr(report, key), "new": fields[key]} + for key in fields.keys() + # skip if field is not changed or just a reviewer field + if getattr(report, key) != fields[key] + and key in settings.TRACKING_REPORT_VERIFIED_CHANGES + }, + } + if not changeset["changes"]: + return False + return changeset + + +def notify_user_changed_report(report, fields, reviewer): + changeset = _get_report_changeset(report, fields) + + if not changeset: + return + + user_changes = {"user": report.user, "changes": [changeset]} + _send_notification_emails([user_changes], reviewer) + + +def notify_user_changed_reports(queryset, fields, reviewer): + users = [report.user for report in queryset.order_by("user").distinct("user")] + user_changes = [] + + for user in users: + changes = [] + for report in queryset.filter(user=user).order_by("date"): + changeset = _get_report_changeset(report, fields) + + # skip edits of own reports and empty changes + if report.user == reviewer or not changeset: + continue + changes.append(changeset) + + # skip user if changes are empty + if not changes: + continue + + user_changes.append({"user": user, "changes": changes}) + + _send_notification_emails(user_changes, reviewer) + + +def notify_user_rejected_report(report, reviewer): + user_changes = {"user": report.user, "changes": [{"report": report}]} + _send_notification_emails([user_changes], reviewer, True) + + +def notify_user_rejected_reports(queryset, fields, reviewer): + users = [report.user for report in queryset.order_by("user").distinct("user")] + user_changes = [] + + for user in users: + changes = [] + for report in queryset.filter(user=user).order_by("date"): + changeset = {"report": report} 
+ changes.append(changeset) + user_changes.append({"user": user, "changes": changes}) + + _send_notification_emails(user_changes, reviewer, True) diff --git a/backend/timed/tracking/templates/mail/notify_user_changed_reports.tmpl b/backend/timed/tracking/templates/mail/notify_user_changed_reports.tmpl new file mode 100644 index 000000000..88a0ab34f --- /dev/null +++ b/backend/timed/tracking/templates/mail/notify_user_changed_reports.tmpl @@ -0,0 +1,16 @@ +{% load tracking_extras %} +Some of your reports have been changed. + +Reviewer: {{reviewer.first_name }} {{ reviewer.last_name }} +{% for changeset in user_changes %} + +Date: {{ changeset.report.date|date:"SHORT_DATE_FORMAT" }} +Duration: {{ changeset.report.duration|duration }} +{% if "task" not in changeset.changes %}Task: {{ changeset.report.task }}{% endif %} +{% if "comment" not in changeset.changes %}Comment: {{ changeset.report.comment }}{% endif %} +{% for key, change in changeset.changes.items %} +* {{ key|title}} + [old] {{ change.old }} + [new] {{ change.new }} +{% endfor %} +---{% endfor %} diff --git a/backend/timed/tracking/templates/mail/notify_user_rejected_reports.tmpl b/backend/timed/tracking/templates/mail/notify_user_rejected_reports.tmpl new file mode 100644 index 000000000..ace805ea4 --- /dev/null +++ b/backend/timed/tracking/templates/mail/notify_user_rejected_reports.tmpl @@ -0,0 +1,13 @@ +{% load tracking_extras %} +Some of your reports have been rejected by a reviewer. Please get in contact +with them to clarify the reports. Most likely, you will just need to move +the reports to the correct project / task. 
+ +Reviewer: {{reviewer.first_name }} {{ reviewer.last_name }} +{% for changeset in user_changes %} + +Date: {{ changeset.report.date|date:"SHORT_DATE_FORMAT" }} +Duration: {{ changeset.report.duration|duration }} +Task: {{ changeset.report.task }} +Comment: {{ changeset.report.comment }} +---{% endfor %} diff --git a/backend/timed/tracking/templatetags/tracking_extras.py b/backend/timed/tracking/templatetags/tracking_extras.py new file mode 100644 index 000000000..d1d2f945f --- /dev/null +++ b/backend/timed/tracking/templatetags/tracking_extras.py @@ -0,0 +1,12 @@ +from django import template + +register = template.Library() + + +@register.filter +def duration(timedelta): + total_seconds = int(timedelta.total_seconds()) + hours = total_seconds // 3600 + minutes = (total_seconds % 3600) // 60 + + return f"{hours}:{minutes:02} (h:mm)" diff --git a/backend/timed/tracking/tests/__init__.py b/backend/timed/tracking/tests/__init__.py new file mode 100644 index 000000000..6e031999e --- /dev/null +++ b/backend/timed/tracking/tests/__init__.py @@ -0,0 +1 @@ +# noqa: D104 diff --git a/backend/timed/tracking/tests/snapshots/__init__.py b/backend/timed/tracking/tests/snapshots/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/timed/tracking/tests/snapshots/snap_test_report.py b/backend/timed/tracking/tests/snapshots/snap_test_report.py new file mode 100644 index 000000000..2ce98f5e7 --- /dev/null +++ b/backend/timed/tracking/tests/snapshots/snap_test_report.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# snapshottest: v1 - https://goo.gl/zC4yUc +from __future__ import unicode_literals + +from snapshottest import Snapshot + + +snapshots = Snapshot() + +snapshots[ + "test_report_notify_rendering 1" +] = """ +Some of your reports have been changed. 
+ +Reviewer: Test User + + +Date: 10/03/1998 +Duration: 3:15 (h:mm) + + + +* Task + [old] Allen Inc > Cross-platform content-based synergy > and Sons + [new] Allen Inc > Cross-platform content-based synergy > LLC + +* Comment + [old] foo + [new] some other comment + +--- + +Date: 05/27/2000 +Duration: 2:30 (h:mm) + +Comment: some other comment + +* Task + [old] Allen Inc > Cross-platform content-based synergy > Ltd + [new] Allen Inc > Cross-platform content-based synergy > LLC + +--- + +Date: 04/20/2005 +Duration: 0:15 (h:mm) +Task: Allen Inc > Cross-platform content-based synergy > LLC +Comment: some other comment + +* Not_Billable + [old] True + [new] False + +--- + +Date: 03/23/2016 +Duration: 1:00 (h:mm) +Task: Allen Inc > Cross-platform content-based synergy > LLC + + +* Comment + [old] original comment + [new] some other comment + +--- +""" diff --git a/backend/timed/tracking/tests/test_absence.py b/backend/timed/tracking/tests/test_absence.py new file mode 100644 index 000000000..e5dc07c15 --- /dev/null +++ b/backend/timed/tracking/tests/test_absence.py @@ -0,0 +1,412 @@ +import datetime + +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.employment.factories import ( + AbsenceTypeFactory, + EmploymentFactory, + PublicHolidayFactory, + UserFactory, +) +from timed.tracking.factories import AbsenceFactory, ReportFactory + + +@pytest.mark.parametrize( + "is_external", + [True, False], +) +def test_absence_list_authenticated(auth_client, is_external): + absence = AbsenceFactory.create(user=auth_client.user) + + # overlapping absence with public holidays need to be hidden + overlap_absence = AbsenceFactory.create( + user=auth_client.user, date=datetime.date(2018, 1, 1) + ) + employment = EmploymentFactory.create( + user=overlap_absence.user, start_date=datetime.date(2017, 12, 31) + ) + if is_external: + employment.is_external = True + employment.save() + + PublicHolidayFactory.create(date=overlap_absence.date, 
location=employment.location) + url = reverse("absence-list") + + response = auth_client.get(url) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + + if not is_external: + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(absence.id) + + +def test_absence_list_superuser(superadmin_client): + AbsenceFactory.create_batch(2) + + url = reverse("absence-list") + response = superadmin_client.get(url) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 2 + + +def test_absence_list_supervisor(internal_employee_client): + user = UserFactory.create() + internal_employee_client.user.supervisees.add(user) + + AbsenceFactory.create(user=internal_employee_client.user) + AbsenceFactory.create(user=user) + + url = reverse("absence-list") + response = internal_employee_client.get(url) + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert len(json["data"]) == 2 + + +def test_absence_list_supervisee(internal_employee_client): + AbsenceFactory.create(user=internal_employee_client.user) + + supervisors = UserFactory.create_batch(2) + + supervisors[0].supervisees.add(internal_employee_client.user) + AbsenceFactory.create(user=supervisors[0]) + + url = reverse("absence-list") + + response = internal_employee_client.get(url) + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert len(json["data"]) == 1 + + # absences of multiple supervisors shouldn't affect supervisee + supervisors[1].supervisees.add(internal_employee_client.user) + AbsenceFactory.create(user=supervisors[1]) + + response = internal_employee_client.get(url) + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert len(json["data"]) == 1 + + +def test_absence_detail(internal_employee_client): + absence = AbsenceFactory.create(user=internal_employee_client.user) + + url = reverse("absence-detail", args=[absence.id]) + + response = 
internal_employee_client.get(url) + + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert json["data"]["id"] == str(absence.id) + + +@pytest.mark.parametrize( + "is_external, expected", + [(False, status.HTTP_201_CREATED), (True, status.HTTP_403_FORBIDDEN)], +) +def test_absence_create(auth_client, is_external, expected): + user = auth_client.user + date = datetime.date(2017, 5, 4) + employment = EmploymentFactory.create( + user=user, start_date=date, worktime_per_day=datetime.timedelta(hours=8) + ) + absence_type = AbsenceTypeFactory.create() + + if is_external: + employment.is_external = True + employment.save() + + data = { + "data": { + "type": "absences", + "id": None, + "attributes": {"date": date.strftime("%Y-%m-%d")}, + "relationships": { + "absence_type": { + "data": {"type": "absence-types", "id": absence_type.id} + } + }, + } + } + + url = reverse("absence-list") + + response = auth_client.post(url, data) + + assert response.status_code == expected + + if response.status_code == status.HTTP_201_CREATED: + json = response.json() + assert json["data"]["relationships"]["user"]["data"]["id"] == ( + str(auth_client.user.id) + ) + + +def test_absence_update_owner(auth_client): + user = auth_client.user + date = datetime.date(2017, 5, 3) + absence = AbsenceFactory.create( + user=auth_client.user, date=datetime.date(2016, 5, 3) + ) + EmploymentFactory.create( + user=user, start_date=date, worktime_per_day=datetime.timedelta(hours=8) + ) + + data = { + "data": { + "type": "absences", + "id": absence.id, + "attributes": {"date": date.strftime("%Y-%m-%d")}, + } + } + + url = reverse("absence-detail", args=[absence.id]) + + response = auth_client.patch(url, data) + + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert json["data"]["attributes"]["date"] == "2017-05-03" + + +def test_absence_update_superadmin_date(superadmin_client): + """Test that superadmin may not change date of absence.""" + user = 
UserFactory.create() + date = datetime.date(2017, 5, 3) + absence = AbsenceFactory.create(user=user, date=datetime.date(2016, 5, 3)) + EmploymentFactory.create( + user=user, start_date=date, worktime_per_day=datetime.timedelta(hours=8) + ) + + data = { + "data": { + "type": "absences", + "id": absence.id, + "attributes": {"date": date.strftime("%Y-%m-%d")}, + } + } + + url = reverse("absence-detail", args=[absence.id]) + + response = superadmin_client.patch(url, data) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +def test_absence_update_superadmin_type(superadmin_client): + """Test that superadmin may not change type of absence.""" + user = UserFactory.create() + date = datetime.date(2017, 5, 3) + absence_type = AbsenceTypeFactory.create() + absence = AbsenceFactory.create(user=user, date=datetime.date(2016, 5, 3)) + EmploymentFactory.create( + user=user, start_date=date, worktime_per_day=datetime.timedelta(hours=8) + ) + + data = { + "data": { + "type": "absences", + "id": absence.id, + "attributes": {"date": date.strftime("%Y-%m-%d")}, + "relationships": { + "absence_type": { + "data": {"type": "absence-types", "id": absence_type.id} + } + }, + } + } + + url = reverse("absence-detail", args=[absence.id]) + + response = superadmin_client.patch(url, data) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +def test_absence_delete_owner(internal_employee_client): + absence = AbsenceFactory.create(user=internal_employee_client.user) + + url = reverse("absence-detail", args=[absence.id]) + + response = internal_employee_client.delete(url) + assert response.status_code == status.HTTP_204_NO_CONTENT + + +def test_absence_delete_superuser(superadmin_client): + """Test that superuser may not delete absences of other users.""" + user = UserFactory.create() + absence = AbsenceFactory.create(user=user) + + url = reverse("absence-detail", args=[absence.id]) + + response = superadmin_client.delete(url) + assert response.status_code == 
status.HTTP_403_FORBIDDEN + + +def test_absence_fill_worktime(auth_client): + """Should create an absence which fills the worktime.""" + date = datetime.date(2017, 5, 10) + user = auth_client.user + EmploymentFactory.create( + user=user, start_date=date, worktime_per_day=datetime.timedelta(hours=8) + ) + absence_type = AbsenceTypeFactory.create(fill_worktime=True) + + ReportFactory.create(user=user, date=date, duration=datetime.timedelta(hours=5)) + + data = { + "data": { + "type": "absences", + "id": None, + "attributes": {"date": date.strftime("%Y-%m-%d")}, + "relationships": { + "absence_type": { + "data": {"type": "absence-types", "id": absence_type.id} + } + }, + } + } + + url = reverse("absence-list") + + response = auth_client.post(url, data) + assert response.status_code == status.HTTP_201_CREATED + + json = response.json() + assert json["data"]["attributes"]["duration"] == "03:00:00" + + +def test_absence_fill_worktime_reported_time_to_long(auth_client): + """ + Verify absence fill worktime is zero when reported time is too long. + + Too long is defined when reported time is longer than worktime per day. 
+ """ + date = datetime.date(2017, 5, 10) + user = auth_client.user + EmploymentFactory.create( + user=user, start_date=date, worktime_per_day=datetime.timedelta(hours=8) + ) + absence_type = AbsenceTypeFactory.create(fill_worktime=True) + + ReportFactory.create( + user=user, date=date, duration=datetime.timedelta(hours=8, minutes=30) + ) + + data = { + "data": { + "type": "absences", + "id": None, + "attributes": {"date": date.strftime("%Y-%m-%d")}, + "relationships": { + "absence_type": { + "data": {"type": "absence-types", "id": absence_type.id} + } + }, + } + } + + url = reverse("absence-list") + + response = auth_client.post(url, data) + assert response.status_code == status.HTTP_201_CREATED + + json = response.json() + assert json["data"]["attributes"]["duration"] == "00:00:00" + + +def test_absence_weekend(auth_client): + """Should not be able to create an absence on a weekend.""" + date = datetime.date(2017, 5, 14) + user = auth_client.user + absence_type = AbsenceTypeFactory.create() + EmploymentFactory.create( + user=user, start_date=date, worktime_per_day=datetime.timedelta(hours=8) + ) + + data = { + "data": { + "type": "absences", + "id": None, + "attributes": {"date": date.strftime("%Y-%m-%d")}, + "relationships": { + "absence_type": { + "data": {"type": "absence-types", "id": absence_type.id} + } + }, + } + } + + url = reverse("absence-list") + + response = auth_client.post(url, data) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +def test_absence_public_holiday(auth_client): + """Should not be able to create an absence on a public holiday.""" + date = datetime.date(2017, 5, 16) + user = auth_client.user + absence_type = AbsenceTypeFactory.create() + employment = EmploymentFactory.create( + user=user, start_date=date, worktime_per_day=datetime.timedelta(hours=8) + ) + PublicHolidayFactory.create(location=employment.location, date=date) + + data = { + "data": { + "type": "absences", + "id": None, + "attributes": {"date": 
date.strftime("%Y-%m-%d")}, + "relationships": { + "absence_type": { + "data": {"type": "absence-types", "id": absence_type.id} + } + }, + } + } + + url = reverse("absence-list") + + response = auth_client.post(url, data) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +def test_absence_create_unemployed(auth_client): + """Test creation of absence fails on unemployed day.""" + absence_type = AbsenceTypeFactory.create() + + data = { + "data": { + "type": "absences", + "id": None, + "attributes": {"date": "2017-05-16"}, + "relationships": { + "absence_type": { + "data": {"type": "absence-types", "id": absence_type.id} + } + }, + } + } + + url = reverse("absence-list") + + response = auth_client.post(url, data) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def test_absence_detail_unemployed(internal_employee_client): + """Test creation of absence fails on unemployed day.""" + absence = AbsenceFactory.create(user=internal_employee_client.user) + + url = reverse("absence-detail", args=[absence.id]) + + res = internal_employee_client.get(url) + assert res.status_code == status.HTTP_200_OK + + json = res.json() + assert json["data"]["attributes"]["duration"] == "00:00:00" diff --git a/backend/timed/tracking/tests/test_activity.py b/backend/timed/tracking/tests/test_activity.py new file mode 100644 index 000000000..b94471df8 --- /dev/null +++ b/backend/timed/tracking/tests/test_activity.py @@ -0,0 +1,342 @@ +from datetime import date, time, timedelta + +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.employment.factories import EmploymentFactory +from timed.tracking.factories import ActivityFactory + + +def test_activity_list(internal_employee_client): + activity = ActivityFactory.create(user=internal_employee_client.user) + url = reverse("activity-list") + + response = internal_employee_client.get(url) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert 
len(json["data"]) == 1 + assert json["data"][0]["id"] == str(activity.id) + + +def test_activity_detail(internal_employee_client): + activity = ActivityFactory.create(user=internal_employee_client.user) + + url = reverse("activity-detail", args=[activity.id]) + + response = internal_employee_client.get(url) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.parametrize( + "task_assignee__is_resource, task_assignee__is_reviewer, is_external, expected", + [ + (True, False, True, status.HTTP_201_CREATED), + (False, True, True, status.HTTP_403_FORBIDDEN), + (True, False, False, status.HTTP_201_CREATED), + (False, True, False, status.HTTP_201_CREATED), + ], +) +def test_activity_create(auth_client, is_external, task_assignee, expected): + """Should create a new activity and automatically set the user.""" + user = auth_client.user + employment = EmploymentFactory(user=user) + + if is_external: + employment.is_external = True + employment.save() + + task_assignee.user = user + task_assignee.save() + task = task_assignee.task + + data = { + "data": { + "type": "activities", + "id": None, + "attributes": { + "from-time": "08:00", + "date": "2017-01-01", + "comment": "Test activity", + }, + "relationships": {"task": {"data": {"type": "tasks", "id": task.id}}}, + } + } + + url = reverse("activity-list") + + response = auth_client.post(url, data) + assert response.status_code == expected + + if response.status_code == status.HTTP_201_CREATED: + json = response.json() + assert int(json["data"]["relationships"]["user"]["data"]["id"]) == int(user.id) + + +def test_activity_create_no_task_external_employee(auth_client, task_assignee): + user = auth_client.user + EmploymentFactory(user=user) + task_assignee.user = user + task_assignee.save() + + data = { + "data": { + "type": "activities", + "id": None, + "attributes": { + "from-time": "08:00", + "date": "2017-01-01", + "comment": "Test activity", + }, + } + } + + url = reverse("activity-list") + + response = 
auth_client.post(url, data) + assert response.status_code == status.HTTP_201_CREATED + + json = response.json() + assert int(json["data"]["relationships"]["user"]["data"]["id"]) == int(user.id) + + +@pytest.mark.parametrize( + "task_assignee__is_resource, task_assignee__is_reviewer, is_external, expected", + [ + (True, False, True, status.HTTP_200_OK), + (False, True, True, status.HTTP_403_FORBIDDEN), + (True, False, False, status.HTTP_200_OK), + (False, True, False, status.HTTP_200_OK), + ], +) +def test_activity_update(auth_client, is_external, task_assignee, expected): + user = auth_client.user + activity = ActivityFactory.create(user=user, task=task_assignee.task) + task_assignee.user = user + task_assignee.save() + employment = EmploymentFactory(user=user) + + if is_external: + employment.is_external = True + employment.save() + + data = { + "data": { + "type": "activities", + "id": activity.id, + "attributes": {"comment": "Test activity 2"}, + } + } + + url = reverse("activity-detail", args=[activity.id]) + + response = auth_client.patch(url, data) + assert response.status_code == expected + + if response.status_code == status.HTTP_200_OK: + json = response.json() + assert ( + json["data"]["attributes"]["comment"] + == data["data"]["attributes"]["comment"] + ) + + +@pytest.mark.parametrize( + "task_assignee__is_resource, task_assignee__is_reviewer, is_external, expected", + [ + (True, False, True, status.HTTP_204_NO_CONTENT), + (False, True, True, status.HTTP_403_FORBIDDEN), + (True, False, False, status.HTTP_204_NO_CONTENT), + (False, True, False, status.HTTP_204_NO_CONTENT), + ], +) +def test_activity_delete(auth_client, is_external, task_assignee, expected): + user = auth_client.user + task_assignee.user = user + task_assignee.save() + activity = ActivityFactory.create(user=user, task=task_assignee.task) + + employment = EmploymentFactory(user=user) + + if is_external: + employment.is_external = True + employment.save() + + url = reverse("activity-detail", 
args=[activity.id]) + + response = auth_client.delete(url) + assert response.status_code == expected + + +def test_activity_list_filter_active(internal_employee_client): + user = internal_employee_client.user + activity1 = ActivityFactory.create(user=user) + activity2 = ActivityFactory.create(user=user, to_time=None, task=activity1.task) + + url = reverse("activity-list") + + response = internal_employee_client.get(url, data={"active": "true"}) + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(activity2.id) + + +def test_activity_list_filter_day(internal_employee_client): + user = internal_employee_client.user + day = date(2016, 2, 2) + ActivityFactory.create(date=day - timedelta(days=1), user=user) + activity = ActivityFactory.create(date=day, user=user) + + url = reverse("activity-list") + response = internal_employee_client.get(url, data={"day": day.strftime("%Y-%m-%d")}) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(activity.id) + + +def test_activity_create_no_task(internal_employee_client): + """Should create a new activity without a task.""" + data = { + "data": { + "type": "activities", + "id": None, + "attributes": { + "from-time": "08:00", + "date": "2017-01-01", + "comment": "Test activity", + }, + "relationships": {"task": {"data": None}}, + } + } + + url = reverse("activity-list") + response = internal_employee_client.post(url, data) + assert response.status_code == status.HTTP_201_CREATED + + json = response.json() + assert json["data"]["relationships"]["task"]["data"] is None + + +def test_activity_active_unique(internal_employee_client): + """Should not be able to have two active blocks.""" + ActivityFactory.create(user=internal_employee_client.user, to_time=None) + + data = { + "data": { + "type": "activities", + "id": None, + "attributes": { + 
"from-time": "08:00", + "date": "2017-01-01", + "comment": "Test activity", + }, + } + } + + url = reverse("activity-list") + + res = internal_employee_client.post(url, data) + + assert res.status_code == status.HTTP_400_BAD_REQUEST + json = res.json() + assert json["errors"][0]["detail"] == ("A user can only have one active activity") + + +def test_activity_to_before_from(internal_employee_client): + """Test that to is not before from.""" + activity = ActivityFactory.create( + user=internal_employee_client.user, from_time=time(7, 30), to_time=None + ) + + data = { + "data": { + "type": "activities", + "id": activity.id, + "attributes": {"to-time": "07:00"}, + } + } + + url = reverse("activity-detail", args=[activity.id]) + + res = internal_employee_client.patch(url, data) + + assert res.status_code == status.HTTP_400_BAD_REQUEST + json = res.json() + assert json["errors"][0]["detail"] == ( + "An activity block may not end before it starts." + ) + + +def test_activity_not_editable(internal_employee_client): + """Test that transferred activities are read only.""" + activity = ActivityFactory.create( + user=internal_employee_client.user, transferred=True + ) + + data = { + "data": { + "type": "activities", + "id": activity.id, + "attributes": {"comment": "Changed Comment"}, + } + } + + url = reverse("activity-detail", args=[activity.id]) + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def test_activity_retrievable_not_editable(internal_employee_client): + """Test that transferred activities are still retrievable.""" + activity = ActivityFactory.create( + user=internal_employee_client.user, transferred=True + ) + + url = reverse("activity-detail", args=[activity.id]) + + response = internal_employee_client.get(url) + assert response.status_code == status.HTTP_200_OK + + +def test_activity_active_update(internal_employee_client): + activity = 
ActivityFactory.create(user=internal_employee_client.user, to_time=None) + + data = { + "data": { + "type": "activities", + "id": activity.id, + "attributes": {"from-time": "08:00", "comment": "Changed Comment"}, + } + } + + url = reverse("activity-detail", args=[activity.id]) + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert ( + json["data"]["attributes"]["comment"] == data["data"]["attributes"]["comment"] + ) + + +def test_activity_set_to_time_none(internal_employee_client, activity_factory): + activity1 = activity_factory(user=internal_employee_client.user, to_time=None) + activity2 = activity_factory( + user=internal_employee_client.user, task=activity1.task + ) + + data = { + "data": { + "type": "activities", + "id": activity2.id, + "attributes": {"to-time": None}, + } + } + + url = reverse("activity-detail", args=[activity2.id]) + + res = internal_employee_client.patch(url, data) + assert res.status_code == status.HTTP_400_BAD_REQUEST diff --git a/backend/timed/tracking/tests/test_attendance.py b/backend/timed/tracking/tests/test_attendance.py new file mode 100644 index 000000000..fc67e2b7d --- /dev/null +++ b/backend/timed/tracking/tests/test_attendance.py @@ -0,0 +1,102 @@ +import pytest +from django.urls import reverse +from rest_framework import status + +from timed.tracking.factories import AttendanceFactory + + +def test_attendance_list(internal_employee_client): + AttendanceFactory.create() + attendance = AttendanceFactory.create(user=internal_employee_client.user) + + url = reverse("attendance-list") + response = internal_employee_client.get(url) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(attendance.id) + + +def test_attendance_detail(internal_employee_client): + attendance = AttendanceFactory.create(user=internal_employee_client.user) + + url = 
reverse("attendance-detail", args=[attendance.id]) + response = internal_employee_client.get(url) + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.parametrize( + "is_external, task_assignee__is_resource, expected", + [ + (False, False, status.HTTP_201_CREATED), + (True, True, status.HTTP_201_CREATED), + (True, False, status.HTTP_403_FORBIDDEN), + ], +) +def test_attendance_create( + auth_client, employment, is_external, task_assignee, expected +): + """Should create a new attendance and automatically set the user.""" + user = auth_client.user + employment.user = user + task_assignee.user = user + task_assignee.save() + + if is_external: + employment.is_external = True + employment.save() + + data = { + "data": { + "type": "attendances", + "id": None, + "attributes": { + "date": "2017-01-01", + "from-time": "08:00", + "to-time": "10:00", + }, + } + } + + url = reverse("attendance-list") + + response = auth_client.post(url, data) + assert response.status_code == expected + + if response.status_code == status.HTTP_201_CREATED: + json = response.json() + assert json["data"]["relationships"]["user"]["data"]["id"] == str(user.id) + + +def test_attendance_update(internal_employee_client): + user = internal_employee_client.user + attendance = AttendanceFactory.create(user=user) + + data = { + "data": { + "type": "attendances", + "id": attendance.id, + "attributes": {"to-time": "15:00:00"}, + "relationships": {"user": {"data": {"id": user.id, "type": "users"}}}, + } + } + + url = reverse("attendance-detail", args=[attendance.id]) + + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert ( + json["data"]["attributes"]["to-time"] == data["data"]["attributes"]["to-time"] + ) + + +def test_attendance_delete(internal_employee_client): + attendance = AttendanceFactory.create(user=internal_employee_client.user) + + url = reverse("attendance-detail", args=[attendance.id]) + + 
response = internal_employee_client.delete(url) + assert response.status_code == status.HTTP_204_NO_CONTENT diff --git a/backend/timed/tracking/tests/test_report.py b/backend/timed/tracking/tests/test_report.py new file mode 100644 index 000000000..2688bcbcb --- /dev/null +++ b/backend/timed/tracking/tests/test_report.py @@ -0,0 +1,2089 @@ +"""Tests for the reports endpoint.""" + +from datetime import timedelta + +import pyexcel +import pytest +from django.urls import reverse +from django.utils.duration import duration_string +from rest_framework import status + +from timed.employment.factories import EmploymentFactory, UserFactory +from timed.projects.factories import ( + CustomerAssigneeFactory, + ProjectAssigneeFactory, + TaskAssigneeFactory, + TaskFactory, +) + + +def test_report_list( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + report_factory.create(user=user) + report = report_factory.create(user=user, duration=timedelta(hours=1)) + url = reverse("report-list") + + response = internal_employee_client.get( + url, + data={ + "date": report.date, + "user": user.id, + "task": report.task_id, + "project": report.task.project_id, + "customer": report.task.project.customer_id, + "include": ("user,task,task.project,task.project.customer,verified_by"), + }, + ) + + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(report.id) + assert json["meta"]["total-time"] == "01:00:00" + + +def test_report_intersection_full( + internal_employee_client, + report_factory, +): + report = report_factory.create() + + url = reverse("report-intersection") + response = internal_employee_client.get( + url, + data={ + "ordering": "task__name", + "task": report.task.id, + "project": report.task.project.id, + "customer": report.task.project.customer.id, + "include": "task,customer,project", + }, + ) + assert response.status_code == status.HTTP_200_OK 
+ + json = response.json() + pk = json["data"].pop("id") + assert "task={0}".format(report.task.id) in pk + assert "project={0}".format(report.task.project.id) in pk + assert "customer={0}".format(report.task.project.customer.id) in pk + + included = json.pop("included") + assert len(included) == 3 + + expected = { + "data": { + "type": "report-intersections", + "attributes": { + "comment": report.comment, + "not-billable": False, + "verified": False, + "review": False, + "billed": False, + "rejected": False, + }, + "relationships": { + "customer": { + "data": { + "id": str(report.task.project.customer.id), + "type": "customers", + } + }, + "project": { + "data": {"id": str(report.task.project.id), "type": "projects"} + }, + "task": {"data": {"id": str(report.task.id), "type": "tasks"}}, + "user": {"data": {"id": str(report.user.id), "type": "users"}}, + }, + }, + "meta": {"count": 1}, + } + assert json == expected + + +def test_report_intersection_partial( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + report = report_factory.create(review=True, not_billable=True, comment="test") + report_factory.create(verified_by=user, comment="test") + # Billed is not set on create because the factory doesnt seem to work with that + report.billed = True + report.save() + + url = reverse("report-intersection") + response = internal_employee_client.get(url) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + expected = { + "data": { + "id": "", + "type": "report-intersections", + "attributes": { + "comment": "test", + "not-billable": None, + "verified": None, + "review": None, + "billed": None, + "rejected": False, + }, + "relationships": { + "customer": {"data": None}, + "project": {"data": None}, + "task": {"data": None}, + "user": {"data": None}, + }, + }, + "meta": {"count": 2}, + } + assert json == expected + + +def test_report_intersection_accountant_editable( + internal_employee_client, + 
report_factory, + user_factory, +): + user = internal_employee_client.user + user.is_accountant = True + user.save() + + other_user = user_factory() + report_factory.create(review=True, not_billable=True, user=other_user) + + report1 = report_factory.create(review=True, not_billable=True, user=other_user) + report1.billed = True + report1.save() + + url = reverse("report-intersection") + response = internal_employee_client.get(url, {"editable": 1}) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + expected = { + "data": { + "id": "editable=1", + "type": "report-intersections", + "attributes": { + "comment": None, + "not-billable": True, + "verified": False, + "review": True, + "billed": None, + "rejected": False, + }, + "relationships": { + "customer": {"data": None}, + "project": {"data": None}, + "task": {"data": None}, + "user": {"data": {"id": str(other_user.id), "type": "users"}}, + }, + }, + "meta": {"count": 2}, + } + assert json == expected + + +def test_report_intersection_accountant_not_editable( + internal_employee_client, + report_factory, + user_factory, +): + user = internal_employee_client.user + user.is_accountant = True + user.save() + + other_user = user_factory() + report_factory.create(review=True, not_billable=True, user=other_user) + + report = report_factory.create(review=True, not_billable=True, user=other_user) + report.billed = True + report.save() + + url = reverse("report-intersection") + response = internal_employee_client.get(url, {"editable": 0}) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + expected = { + "data": { + "id": "editable=0", + "type": "report-intersections", + "attributes": { + "comment": None, + "not-billable": None, + "verified": None, + "review": None, + "billed": None, + "rejected": None, + }, + "relationships": { + "customer": {"data": None}, + "project": {"data": None}, + "task": {"data": None}, + "user": {"data": None}, + }, + }, + "meta": {"count": 
0}, + } + assert json == expected + + +def test_report_list_filter_id( + internal_employee_client, + report_factory, +): + report_1 = report_factory.create(date="2017-01-01") + report_2 = report_factory.create(date="2017-02-01") + report_factory.create() + + url = reverse("report-list") + + response = internal_employee_client.get( + url, data={"id": "{0},{1}".format(report_1.id, report_2.id), "ordering": "id"} + ) + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert len(json["data"]) == 2 + assert json["data"][0]["id"] == str(report_1.id) + assert json["data"][1]["id"] == str(report_2.id) + + +def test_report_list_filter_id_empty( + internal_employee_client, + report_factory, +): + """Test that empty id filter is ignored.""" + report_factory.create() + + url = reverse("report-list") + + response = internal_employee_client.get(url, data={"id": ""}) + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert len(json["data"]) == 1 + + +def test_report_list_filter_reviewer( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + report = report_factory.create(user=user) + ProjectAssigneeFactory.create( + user=user, project=report.task.project, is_reviewer=True + ) + + # add new task to the project + task2 = TaskFactory.create(project=report.task.project) + report_factory.create(user=user, task=task2) + + # add task assignee with reviewer role to the new task + user2 = UserFactory.create() + TaskAssigneeFactory.create(user=user2, task=task2, is_reviewer=True) + + url = reverse("report-list") + + response = internal_employee_client.get(url, data={"reviewer": user.id}) + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(report.id) + + +def test_report_list_filter_verifier( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + report = 
report_factory.create(verified_by=user) + report_factory.create() + + url = reverse("report-list") + + response = internal_employee_client.get(url, data={"verifier": user.id}) + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(report.id) + + +def test_report_list_filter_editable_owner( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + report = report_factory.create(user=user) + report_factory.create() + + url = reverse("report-list") + + response = internal_employee_client.get(url, data={"editable": 1}) + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(report.id) + + +def test_report_list_filter_not_editable_owner( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + report_factory.create(user=user) + report = report_factory.create() + + url = reverse("report-list") + + response = internal_employee_client.get(url, data={"editable": 0}) + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(report.id) + + +def test_report_list_filter_editable_reviewer( + internal_employee_client, report_factory, user_factory +): + user = internal_employee_client.user + # not editable report + report_factory.create() + + # editable reports + # 1st report of current user + report_factory.create(user=user) + # 2nd case: report of a project which has several + # reviewers and report is created by current user + report = report_factory.create(user=user) + other_user = user_factory.create() + ProjectAssigneeFactory.create( + user=user, project=report.task.project, is_reviewer=True + ) + ProjectAssigneeFactory.create( + user=other_user, project=report.task.project, is_reviewer=True + ) + # 3rd case: report by other user and current 
user + # is the reviewer + reviewer_report = report_factory.create() + ProjectAssigneeFactory.create( + user=user, project=reviewer_report.task.project, is_reviewer=True + ) + + url = reverse("report-list") + + response = internal_employee_client.get(url, data={"editable": 1}) + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert len(json["data"]) == 3 + + +def test_report_list_filter_editable_superuser(superadmin_client, report_factory): + EmploymentFactory.create(user=superadmin_client.user) + report = report_factory.create() + + url = reverse("report-list") + + response = superadmin_client.get(url, data={"editable": 1}) + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(report.id) + + +def test_report_list_filter_not_editable_superuser(superadmin_client, report_factory): + EmploymentFactory.create(user=superadmin_client.user) + report_factory.create() + + url = reverse("report-list") + + response = superadmin_client.get(url, data={"editable": 0}) + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert len(json["data"]) == 0 + + +def test_report_list_filter_editable_supervisor( + internal_employee_client, + report_factory, + user_factory, +): + user = internal_employee_client.user + # not editable report + report_factory.create() + + # editable reports + # 1st case: report by current user + report_factory.create(user=user) + # 2nd case: report by current user with several supervisors + report = report_factory.create(user=user) + report.user.supervisors.add(user) + other_user = user_factory.create() + report.user.supervisors.add(other_user) + # 3rd case: report by different user with current user as supervisor + supervisor_report = report_factory.create() + supervisor_report.user.supervisors.add(user) + + url = reverse("report-list") + + response = internal_employee_client.get(url, data={"editable": 1}) + 
assert response.status_code == status.HTTP_200_OK + json = response.json() + assert len(json["data"]) == 3 + + +def test_report_list_filter_billed( + internal_employee_client, + report, +): + # Billed is not set on create because the factory doesnt seem to work with that + report.billed = True + report.save() + + url = reverse("report-list") + + response = internal_employee_client.get(url, data={"billed": 1}) + assert response.status_code == status.HTTP_200_OK + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(report.id) + + +def test_report_export_missing_type( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + url = reverse("report-export") + + response = internal_employee_client.get(url, data={"user": user.id}) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +def test_report_detail( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + report = report_factory.create(user=user) + + url = reverse("report-detail", args=[report.id]) + response = internal_employee_client.get(url) + + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.parametrize( + "task_assignee__is_reviewer, task_assignee__is_manager, task_assignee__is_resource, is_external, expected", + [ + (True, False, False, True, status.HTTP_400_BAD_REQUEST), + (False, True, False, True, status.HTTP_403_FORBIDDEN), + (False, False, True, True, status.HTTP_201_CREATED), + (True, False, False, False, status.HTTP_201_CREATED), + (False, True, False, False, status.HTTP_201_CREATED), + (False, False, True, False, status.HTTP_201_CREATED), + ], +) +def test_report_create( + auth_client, report_factory, task_factory, task_assignee, is_external, expected +): + """Should create a new report and automatically set the user.""" + user = auth_client.user + task = task_factory.create() + task_assignee.user = user + task_assignee.task = task + task_assignee.save() + + if 
is_external: + EmploymentFactory.create(user=user, is_external=True) + else: + EmploymentFactory.create(user=user, is_external=False) + + data = { + "data": { + "type": "reports", + "id": None, + "attributes": { + "comment": "foo", + "duration": "00:50:00", + "date": "2017-02-01", + }, + "relationships": { + "task": {"data": {"type": "tasks", "id": task.id}}, + "verified-by": {"data": None}, + }, + } + } + + url = reverse("report-list") + + response = auth_client.post(url, data) + assert response.status_code == expected + + if response.status_code == status.HTTP_201_CREATED: + json = response.json() + assert json["data"]["relationships"]["user"]["data"]["id"] == str(user.id) + + assert json["data"]["relationships"]["task"]["data"]["id"] == str(task.id) + + +def test_report_create_billed( + internal_employee_client, report_factory, project_factory, task_factory +): + """Should create a new report and automatically set the user.""" + user = internal_employee_client.user + project = project_factory.create(billed=True) + task = task_factory.create(project=project) + + data = { + "data": { + "type": "reports", + "id": None, + "attributes": { + "comment": "foo", + "duration": "00:50:00", + "date": "2017-02-01", + }, + "relationships": { + "task": {"data": {"type": "tasks", "id": task.id}}, + "verified-by": {"data": None}, + }, + } + } + + url = reverse("report-list") + + response = internal_employee_client.post(url, data) + assert response.status_code == status.HTTP_201_CREATED + + json = response.json() + assert json["data"]["relationships"]["user"]["data"]["id"] == str(user.id) + + assert json["data"]["relationships"]["task"]["data"]["id"] == str(task.id) + + assert json["data"]["attributes"]["billed"] + + +def test_report_update_bulk( + internal_employee_client, + report_factory, + task_factory, +): + task = task_factory.create() + report = report_factory.create(user=internal_employee_client.user) + + url = reverse("report-bulk") + + data = { + "data": { + "type": 
"report-bulks", + "id": None, + "relationships": {"task": {"data": {"type": "tasks", "id": task.id}}}, + } + } + + response = internal_employee_client.post(url + "?editable=1", data) + assert response.status_code == status.HTTP_204_NO_CONTENT + + report.refresh_from_db() + assert report.task == task + + +def test_report_update_bulk_verify_non_reviewer( + internal_employee_client, + report_factory, +): + report_factory.create(user=internal_employee_client.user) + + url = reverse("report-bulk") + + data = { + "data": {"type": "report-bulks", "id": None, "attributes": {"verified": True}} + } + + response = internal_employee_client.post(url + "?editable=1", data) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +def test_report_update_bulk_verify_superuser(superadmin_client, report_factory): + user = superadmin_client.user + EmploymentFactory.create(user=user) + report = report_factory.create(user=user) + + url = reverse("report-bulk") + + data = { + "data": {"type": "report-bulks", "id": None, "attributes": {"verified": True}} + } + + response = superadmin_client.post(url + "?editable=1", data) + assert response.status_code == status.HTTP_204_NO_CONTENT + + report.refresh_from_db() + assert report.verified_by == user + + +def test_report_update_bulk_verify_reviewer( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + report = report_factory.create(user=user) + ProjectAssigneeFactory.create( + user=user, project=report.task.project, is_reviewer=True + ) + + url = reverse("report-bulk") + + data = { + "data": { + "type": "report-bulks", + "id": None, + "attributes": {"verified": True, "comment": "some comment"}, + } + } + + response = internal_employee_client.post( + url + "?editable=1&reviewer={0}".format(user.id), data + ) + assert response.status_code == status.HTTP_204_NO_CONTENT + + report.refresh_from_db() + assert report.verified_by == user + assert report.comment == "some comment" + + +def 
test_report_update_bulk_reset_verify(superadmin_client, report_factory): + user = superadmin_client.user + EmploymentFactory.create(user=user) + report = report_factory.create(verified_by=user) + + url = reverse("report-bulk") + + data = { + "data": {"type": "report-bulks", "id": None, "attributes": {"verified": False}} + } + + response = superadmin_client.post(url + "?editable=1", data) + assert response.status_code == status.HTTP_204_NO_CONTENT + + report.refresh_from_db() + assert report.verified_by_id is None + + +def test_report_update_bulk_not_editable( + internal_employee_client, + report_factory, +): + url = reverse("report-bulk") + + data = { + "data": { + "type": "report-bulks", + "id": None, + "attributes": {"not_billable": True}, + } + } + + response = internal_employee_client.post(url, data) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +def test_report_update_verified_as_non_staff_but_owner( + internal_employee_client, + report_factory, +): + """Test that an owner (not staff) may not change a verified report.""" + user = internal_employee_client.user + report = report_factory.create( + user=user, verified_by=user, duration=timedelta(hours=2) + ) + + url = reverse("report-detail", args=[report.id]) + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": {"duration": "01:00:00"}, + } + } + + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def test_report_update_owner( + internal_employee_client, report_factory, task_factory, mailoutbox +): + """Should update an existing report.""" + user = internal_employee_client.user + report = report_factory.create(user=user) + task = task_factory.create() + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": { + "comment": "foobar", + "duration": "01:00:00", + "rejected": False, + "date": "2017-02-04", + }, + "relationships": {"task": {"data": {"type": "tasks", "id": task.id}}}, 
+ } + } + + url = reverse("report-detail", args=[report.id]) + + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert ( + json["data"]["attributes"]["comment"] == data["data"]["attributes"]["comment"] + ) + assert ( + json["data"]["attributes"]["duration"] == data["data"]["attributes"]["duration"] + ) + assert json["data"]["attributes"]["date"] == data["data"]["attributes"]["date"] + assert json["data"]["relationships"]["task"]["data"]["id"] == str( + data["data"]["relationships"]["task"]["data"]["id"] + ) + assert len(mailoutbox) == 0 + + +def test_report_update_date_reviewer( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + report = report_factory.create() + ProjectAssigneeFactory.create( + user=user, project=report.task.project, is_reviewer=True + ) + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": {"date": "2017-02-04"}, + } + } + + url = reverse("report-detail", args=[report.id]) + + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +def test_report_update_duration_reviewer( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + report = report_factory.create(duration=timedelta(hours=2)) + ProjectAssigneeFactory.create( + user=user, project=report.task.project, is_reviewer=True + ) + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": {"duration": "01:00:00"}, + } + } + + url = reverse("report-detail", args=[report.id]) + + res = internal_employee_client.patch(url, data) + assert res.status_code == status.HTTP_400_BAD_REQUEST + + +def test_report_update_by_user( + internal_employee_client, + report_factory, +): + """Updating of report belonging to different user is not allowed.""" + report = report_factory.create() + data = { + "data": { + "type": "reports", + "id": 
report.id, + "attributes": {"comment": "foobar"}, + } + } + + url = reverse("report-detail", args=[report.id]) + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def test_report_update_verified_and_review_reviewer( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + report = report_factory.create(duration=timedelta(hours=2)) + ProjectAssigneeFactory.create( + user=user, project=report.task.project, is_reviewer=True + ) + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": {"review": True}, + "relationships": { + "verified-by": {"data": {"id": user.pk, "type": "users"}} + }, + } + } + + url = reverse("report-detail", args=[report.id]) + + res = internal_employee_client.patch(url, data) + assert res.status_code == status.HTTP_400_BAD_REQUEST + + +def test_report_set_verified_by_user( + internal_employee_client, + report_factory, +): + """Test that normal user may not verify report.""" + user = internal_employee_client.user + report = report_factory.create(user=user) + data = { + "data": { + "type": "reports", + "id": report.id, + "relationships": { + "verified-by": {"data": {"id": user.id, "type": "users"}} + }, + } + } + + url = reverse("report-detail", args=[report.id]) + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +def test_report_update_reviewer( + internal_employee_client, + report_factory, + mailoutbox, +): + user = internal_employee_client.user + report = report_factory.create(user=user) + ProjectAssigneeFactory.create( + user=user, project=report.task.project, is_reviewer=True + ) + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": {"comment": "foobar", "rejected": False}, + "relationships": { + "verified-by": {"data": {"id": user.id, "type": "users"}} + }, + } + } + + url = reverse("report-detail", args=[report.id]) + + 
response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_200_OK + assert len(mailoutbox) == 0 + + +def test_report_update_supervisor( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + report = report_factory.create(user=user) + report.user.supervisors.add(user) + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": {"comment": "foobar"}, + } + } + + url = reverse("report-detail", args=[report.id]) + + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_200_OK + + +def test_report_verify_other_user(superadmin_client, report_factory, user_factory): + """Verify that superuser may not verify to other user.""" + EmploymentFactory.create(user=superadmin_client.user) + user = user_factory.create() + report = report_factory.create() + + data = { + "data": { + "type": "reports", + "id": report.id, + "relationships": { + "verified-by": {"data": {"id": user.id, "type": "users"}} + }, + } + } + + url = reverse("report-detail", args=[report.id]) + response = superadmin_client.patch(url, data) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +def test_report_reset_verified_by_reviewer( + internal_employee_client, + report_factory, +): + """Test that reviewer may not change verified report.""" + user = internal_employee_client.user + reviewer = UserFactory.create() + report = report_factory.create(user=user, verified_by=reviewer) + ProjectAssigneeFactory.create( + user=reviewer, project=report.task.project, is_reviewer=True + ) + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": {"comment": "foobar"}, + "relationships": {"verified-by": {"data": None}}, + } + } + + url = reverse("report-detail", args=[report.id]) + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_403_FORBIDDEN + + report.refresh_from_db() + report.verified_by = None + + 
+def test_report_reset_verified_and_billed_by_reviewer( + internal_employee_client, + report_factory, +): + """Test that reviewer may not change verified and billed report.""" + user = internal_employee_client.user + report = report_factory.create(user=user, verified_by=user) + ProjectAssigneeFactory.create( + user=user, project=report.task.project, is_reviewer=True + ) + # Billed is not set on create because the factory doesnt seem to work with that + report.billed = True + report.save() + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": {"comment": "foobar"}, + "relationships": {"verified-by": {"data": None}}, + } + } + + url = reverse("report-detail", args=[report.id]) + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +@pytest.mark.parametrize( + "task_assignee__is_reviewer, task_assignee__is_manager, task_assignee__is_resource, is_external, verified, expected", + [ + (True, False, False, False, True, status.HTTP_403_FORBIDDEN), + (True, False, False, False, False, status.HTTP_204_NO_CONTENT), + (False, True, False, False, False, status.HTTP_204_NO_CONTENT), + (False, True, False, False, True, status.HTTP_403_FORBIDDEN), + (False, False, True, False, False, status.HTTP_204_NO_CONTENT), + (False, False, True, False, True, status.HTTP_403_FORBIDDEN), + (True, False, False, True, False, status.HTTP_403_FORBIDDEN), + (False, True, False, True, False, status.HTTP_403_FORBIDDEN), + (False, False, True, True, False, status.HTTP_204_NO_CONTENT), + (True, False, False, True, True, status.HTTP_403_FORBIDDEN), + (False, True, False, True, True, status.HTTP_403_FORBIDDEN), + (False, False, True, True, True, status.HTTP_403_FORBIDDEN), + ], +) +def test_report_delete_own_report( + auth_client, report_factory, task_assignee, is_external, verified, expected +): + user = auth_client.user + task_assignee.user = user + task_assignee.save() + report = report_factory.create(user=user, 
task=task_assignee.task) + + if verified: + report.verified_by = UserFactory.create() + report.save() + + if is_external: + EmploymentFactory.create(user=user, is_external=True) + else: + EmploymentFactory.create(user=user, is_external=False) + + url = reverse("report-detail", args=[report.id]) + response = auth_client.delete(url) + assert response.status_code == expected + + +@pytest.mark.parametrize( + "task_assignee__is_reviewer, task_assignee__is_manager, task_assignee__is_resource, is_external, verified", + [ + (True, False, False, False, True), + (True, False, False, False, False), + (False, True, False, False, False), + (False, True, False, False, True), + (False, False, True, False, False), + (False, False, True, False, True), + (True, False, False, True, False), + (True, False, False, True, True), + (False, True, False, True, False), + (False, True, False, True, True), + (False, False, True, True, False), + (False, False, True, True, True), + ], +) +def test_report_delete_not_report_owner( + auth_client, report_factory, task_assignee, is_external, verified +): + user = auth_client.user + task_assignee.user = user + task_assignee.save() + + user2 = UserFactory.create() + report = report_factory.create(user=user2, task=task_assignee.task) + + if verified: + report.verified_by = UserFactory.create() + report.save() + + if is_external: + EmploymentFactory.create(user=user, is_external=True) + else: + EmploymentFactory.create(user=user, is_external=False) + + url = reverse("report-detail", args=[report.id]) + response = auth_client.delete(url) + # status code 404 is expected, when the user cannot see the specific report + # otherwise the user shouldn't be allowed to delete it, therefore code 403 + assert response.status_code in [ + status.HTTP_403_FORBIDDEN, + status.HTTP_404_NOT_FOUND, + ] + + +def test_report_round_duration(db, report_factory): + """Should round the duration of a report to 15 minutes.""" + report = report_factory.create() + + report.duration 
= timedelta(hours=1, minutes=7) + report.save() + + assert duration_string(report.duration) == "01:00:00" + + report.duration = timedelta(hours=1, minutes=8) + report.save() + + assert duration_string(report.duration) == "01:15:00" + + report.duration = timedelta(hours=1, minutes=53) + report.save() + + assert duration_string(report.duration) == "02:00:00" + + +def test_report_list_no_result(admin_client): + EmploymentFactory.create(user=admin_client.user) + url = reverse("report-list") + res = admin_client.get(url) + + assert res.status_code == status.HTTP_200_OK + json = res.json() + assert json["meta"]["total-time"] == "00:00:00" + + +def test_report_delete_superuser(superadmin_client, report_factory): + """Test that superuser may not delete reports of other users.""" + EmploymentFactory.create(user=superadmin_client.user) + report = report_factory.create() + url = reverse("report-detail", args=[report.id]) + + response = superadmin_client.delete(url) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def test_report_list_filter_cost_center( + internal_employee_client, + report_factory, + cost_center_factory, + project_factory, + task_factory, +): + cost_center = cost_center_factory.create() + # 1st valid case: report with task of given cost center + # but different project cost center + task = task_factory.create(cost_center=cost_center) + report_task = report_factory.create(task=task) + # 2nd valid case: report with project of given cost center + project = project_factory.create(cost_center=cost_center) + task = task_factory.create(cost_center=None, project=project) + report_project = report_factory.create(task=task) + # Invalid case: report without cost center + project = project_factory.create(cost_center=None) + task = task_factory.create(cost_center=None, project=project) + report_factory.create(task=task) + + url = reverse("report-list") + + res = internal_employee_client.get(url, data={"cost_center": cost_center.id}) + assert res.status_code 
== status.HTTP_200_OK + json = res.json() + assert len(json["data"]) == 2 + ids = {int(entry["id"]) for entry in json["data"]} + assert {report_task.id, report_project.id} == ids + + +@pytest.mark.parametrize("file_type", ["csv", "xlsx", "ods"]) +@pytest.mark.parametrize( + "project_cs_name,task_cs_name,project_bt_name", + [("Project cost center", "Task cost center", "Some billing type")], +) +@pytest.mark.parametrize( + "project_cs,task_cs,expected_cs_name", + [ + (True, True, "Task cost center"), + (True, False, "Project cost center"), + (False, True, "Task cost center"), + (False, False, ""), + ], +) +@pytest.mark.parametrize( + "project_bt,expected_bt_name", [(True, "Some billing type"), (False, "")] +) +def test_report_export( + internal_employee_client, + django_assert_num_queries, + report, + task, + project, + cost_center_factory, + file_type, + project_cs, + task_cs, + expected_cs_name, + project_bt, + expected_bt_name, + project_cs_name, + task_cs_name, + project_bt_name, +): + report.task.project.cost_center = cost_center_factory(name=project_cs_name) + report.task.cost_center = cost_center_factory(name=task_cs_name) + report.task.project.billing_type.name = project_bt_name + report.task.project.billing_type.save() + + if not project_cs: + project.cost_center = None + if not task_cs: + task.cost_center = None + if not project_bt: + project.billing_type = None + project.save() + task.save() + + url = reverse("report-export") + + with django_assert_num_queries(7): + response = internal_employee_client.get(url, data={"file_type": file_type}) + + assert response.status_code == status.HTTP_200_OK + + book = pyexcel.get_book(file_content=response.content, file_type=file_type) + # bookdict is a dict of tuples(name, content) + sheet = book.bookdict.popitem()[1] + + assert len(sheet) == 2 + assert sheet[1][-2:] == [expected_bt_name, expected_cs_name] + + +@pytest.mark.parametrize( + "settings_count,given_count,expected_status", + [ + (-1, 9, status.HTTP_200_OK), 
+ (0, 9, status.HTTP_200_OK), + (10, 9, status.HTTP_200_OK), + (9, 10, status.HTTP_400_BAD_REQUEST), + ], +) +def test_report_export_max_count( + internal_employee_client, + django_assert_num_queries, + report_factory, + task, + settings, + settings_count, + given_count, + expected_status, +): + settings.REPORTS_EXPORT_MAX_COUNT = settings_count + report_factory.create_batch(given_count, task=task) + + url = reverse("report-export") + + response = internal_employee_client.get(url, data={"file_type": "csv"}) + + assert response.status_code == expected_status + + +def test_report_update_bulk_verify_reviewer_multiple_notify( + internal_employee_client, + task, + task_factory, + project, + report_factory, + user_factory, + mailoutbox, +): + reviewer = internal_employee_client.user + ProjectAssigneeFactory.create(user=reviewer, project=project, is_reviewer=True) + + user1, user2, user3 = user_factory.create_batch(3) + report1_1 = report_factory(user=user1, task=task) + report1_2 = report_factory(user=user1, task=task) + report2 = report_factory(user=user2, task=task) + report3 = report_factory(user=user3, task=task) + + other_task = task_factory() + + url = reverse("report-bulk") + + data = { + "data": { + "type": "report-bulks", + "id": None, + "attributes": {"verified": True, "comment": "some comment"}, + "relationships": {"task": {"data": {"type": "tasks", "id": other_task.id}}}, + } + } + + query_params = ( + "?editable=1" + f"&reviewer={reviewer.id}" + "&id=" + ",".join(str(r.id) for r in [report1_1, report1_2, report2, report3]) + ) + response = internal_employee_client.post(url + query_params, data) + assert response.status_code == status.HTTP_204_NO_CONTENT + + for report in [report1_1, report1_2, report2, report3]: + report.refresh_from_db() + assert report.verified_by == reviewer + assert report.comment == "some comment" + assert report.task == other_task + + # every user received one mail + assert len(mailoutbox) == 3 + assert all(True for mail in mailoutbox 
if len(mail.to) == 1) + assert set(mail.to[0] for mail in mailoutbox) == set( + user.email for user in [user1, user2, user3] + ) + + +@pytest.mark.parametrize("own_report", [True, False]) +@pytest.mark.parametrize( + "has_attributes,different_attributes,verified,expected", + [ + (True, True, True, True), + (True, True, False, True), + (True, False, True, False), + (False, None, True, False), + (False, None, False, False), + ], +) +def test_report_update_reviewer_notify( + internal_employee_client, + user_factory, + report_factory, + task_factory, + mailoutbox, + own_report, + has_attributes, + different_attributes, + verified, + expected, +): + reviewer = internal_employee_client.user + user = user_factory() + + if own_report: + report = report_factory(user=reviewer, review=True) + else: + report = report_factory(user=user, review=True) + ProjectAssigneeFactory.create( + user=reviewer, project=report.task.project, is_reviewer=True + ) + ProjectAssigneeFactory.create( + user=user, project=report.task.project, is_reviewer=True + ) + new_task = task_factory(project=report.task.project) + task = report.task + TaskAssigneeFactory.create(user=user, is_resource=True, task=task) + TaskAssigneeFactory.create(user=reviewer, is_reviewer=True, task=task) + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": {}, + "relationships": {}, + } + } + if has_attributes: + if different_attributes: + data["data"]["attributes"] = {"comment": "foobar", "review": False} + data["data"]["relationships"]["task"] = { + "data": {"id": new_task.id, "type": "tasks"} + } + else: + data["data"]["attributes"] = {"comment": report.comment} + + if verified: + data["data"]["attributes"]["verified"] = verified + + url = reverse("report-detail", args=[report.id]) + + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_200_OK + + mail_count = 1 if not own_report and expected else 0 + assert len(mailoutbox) == mail_count + + if 
mail_count: + mail = mailoutbox[0] + assert len(mail.to) == 1 + assert mail.to[0] == user.email + + +def test_report_notify_rendering( + internal_employee_client, + user_factory, + project, + report_factory, + task_factory, + mailoutbox, + snapshot, +): + reviewer = internal_employee_client.user + user = user_factory() + ProjectAssigneeFactory.create(user=reviewer, project=project, is_reviewer=True) + task1, task2, task3 = task_factory.create_batch(3, project=project) + + report1 = report_factory( + user=user, task=task1, comment="original comment", not_billable=False + ) + report2 = report_factory( + user=user, task=task2, comment="some other comment", not_billable=False + ) + report3 = report_factory(user=user, task=task3, comment="foo", not_billable=False) + report4 = report_factory( + user=user, task=task1, comment=report2.comment, not_billable=True + ) + + data = { + "data": { + "type": "report-bulks", + "id": None, + "attributes": {"comment": report2.comment, "not-billable": False}, + "relationships": { + "task": {"data": {"id": report1.task.id, "type": "tasks"}} + }, + } + } + + url = reverse("report-bulk") + + query_params = ( + "?editable=1" + f"&reviewer={reviewer.id}" + "&id=" + ",".join(str(r.id) for r in [report1, report2, report3, report4]) + ) + response = internal_employee_client.post(url + query_params, data) + assert response.status_code == status.HTTP_204_NO_CONTENT + + assert len(mailoutbox) == 1 + snapshot.assert_match(mailoutbox[0].body) + + +@pytest.mark.parametrize( + "report__review,needs_review", [(True, False), (False, True), (True, True)] +) +def test_report_update_bulk_review_and_verified( + superadmin_client, project, task, report, user_factory, needs_review +): + EmploymentFactory.create(user=superadmin_client.user) + data = { + "data": {"type": "report-bulks", "id": None, "attributes": {"verified": True}} + } + + if needs_review: + data["data"]["attributes"]["review"] = True + + url = reverse("report-bulk") + + query_params = 
f"?id={report.id}" + response = superadmin_client.post(url + query_params, data) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +def test_report_update_bulk_bill_non_reviewer( + internal_employee_client, + report_factory, +): + report_factory.create(user=internal_employee_client.user) + + url = reverse("report-bulk") + + data = {"data": {"type": "report-bulks", "id": None, "attributes": {"billed": 1}}} + + response = internal_employee_client.post(url + "?editable=1", data) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +def test_report_update_bulk_bill_reviewer( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + report = report_factory.create(user=user) + ProjectAssigneeFactory.create( + user=user, project=report.task.project, is_reviewer=True + ) + + url = reverse("report-bulk") + + data = { + "data": { + "type": "report-bulks", + "id": None, + "attributes": {"billed": True}, + } + } + + response = internal_employee_client.post( + url + "?editable=1&reviewer={0}".format(user.id), data + ) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + report.refresh_from_db() + assert not report.billed + + +def test_report_update_bulk_bill_accountant( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + user.is_accountant = True + user.save() + report = report_factory.create(user=user) + url = reverse("report-bulk") + + data = { + "data": { + "type": "report-bulks", + "id": None, + "attributes": {"billed": True}, + } + } + + response = internal_employee_client.post(url + "?editable=1", data) + assert response.status_code == status.HTTP_204_NO_CONTENT + + report.refresh_from_db() + assert report.billed + + +def test_report_update_billed_user( + internal_employee_client, + report_factory, +): + report = report_factory.create() + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": {"billed": 1}, + } + } + + url = 
reverse("report-detail", args=[report.id]) + + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +@pytest.mark.parametrize( + "is_accountant, expected", + [ + (True, status.HTTP_200_OK), + (False, status.HTTP_400_BAD_REQUEST), + ], +) +def test_report_set_billed_by_user( + internal_employee_client, + report_factory, + is_accountant, + expected, +): + """Test that normal user may not bill report.""" + user = internal_employee_client.user + if is_accountant: + user.is_accountant = True + user.save() + report = report_factory.create(user=user) + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": {"billed": 1}, + } + } + + url = reverse("report-detail", args=[report.id]) + response = internal_employee_client.patch(url, data) + assert response.status_code == expected + + +def test_report_update_billed(internal_employee_client, report_factory, task): + user = internal_employee_client.user + report = report_factory.create(user=user, billed=True) + report.task.project.billed = True + report.task.project.save() + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": {"comment": "foobar"}, + } + } + + url = reverse("report-detail", args=[report.id]) + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_200_OK + + report.refresh_from_db() + assert report.billed + + data = { + "data": { + "type": "reports", + "id": report.id, + "relationships": { + "project": {"data": {"type": "projects", "id": task.project.id}}, + "task": {"data": {"type": "tasks", "id": task.id}}, + }, + } + } + + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_200_OK + + report.refresh_from_db() + assert not report.billed + + +def test_report_update_bulk_billed(internal_employee_client, report_factory, task): + user = internal_employee_client.user + report = report_factory.create(user=user) + 
ProjectAssigneeFactory.create( + user=user, project=report.task.project, is_reviewer=True + ) + task.project.billed = True + task.project.save() + + url = reverse("report-bulk") + + data = { + "data": { + "type": "report-bulks", + "id": None, + "relationships": { + "project": {"data": {"type": "projects", "id": task.project.id}}, + "task": {"data": {"type": "tasks", "id": task.id}}, + }, + } + } + + response = internal_employee_client.post( + url + "?editable=1&reviewer={0}".format(user.id), data + ) + assert response.status_code == status.HTTP_204_NO_CONTENT + + report.refresh_from_db() + assert report.billed + + +def test_report_list_external_employee(external_employee_client, report_factory): + user = external_employee_client.user + report = report_factory.create(user=user, duration=timedelta(hours=1)) + TaskAssigneeFactory.create(user=user, task=report.task, is_resource=True) + report_factory.create_batch(4) + url = reverse("report-list") + + response = external_employee_client.get( + url, + data={ + "date": report.date, + "user": user.id, + "task": report.task_id, + "project": report.task.project_id, + "customer": report.task.project.customer_id, + "include": ("user,task,task.project,task.project.customer,verified_by"), + }, + ) + + assert response.status_code == status.HTTP_200_OK + + json = response.json() + assert len(json["data"]) == 1 + assert json["data"][0]["id"] == str(report.id) + assert json["meta"]["total-time"] == "01:00:00" + + +@pytest.mark.parametrize( + "is_assigned, expected, status_code", + [(True, 1, status.HTTP_200_OK), (False, 0, status.HTTP_403_FORBIDDEN)], +) +def test_report_list_no_employment( + auth_client, report_factory, is_assigned, expected, status_code +): + user = auth_client.user + report = report_factory.create(user=user, duration=timedelta(hours=1)) + if is_assigned: + CustomerAssigneeFactory.create( + user=user, is_customer=True, customer=report.task.project.customer + ) + report_factory.create_batch(4) + + url = 
reverse("report-list") + + response = auth_client.get(url) + assert response.status_code == status_code + + json = response.json() + if expected: + assert len(json["data"]) == expected + assert json["data"][0]["id"] == str(report.id) + assert json["meta"]["total-time"] == "01:00:00" + + +@pytest.mark.parametrize( + "report_owner, reviewer, expected, mail_count, status_code", + [ + (True, True, False, 0, status.HTTP_400_BAD_REQUEST), + (False, True, True, 1, status.HTTP_200_OK), + (True, False, False, 0, status.HTTP_400_BAD_REQUEST), + (False, False, False, 0, status.HTTP_403_FORBIDDEN), + ], +) +def test_report_reject( + internal_employee_client, + report_owner, + report_factory, + reviewer, + expected, + status_code, + mail_count, + mailoutbox, +): + user = internal_employee_client.user + user2 = UserFactory.create() + report = report_factory.create(user=user2 if not report_owner else user) + if reviewer: + ProjectAssigneeFactory.create( + user=user, is_reviewer=True, project=report.task.project + ) + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": {"rejected": True}, + } + } + + url = reverse("report-detail", args=[report.id]) + response = internal_employee_client.patch(url, data) + assert response.status_code == status_code + + report.refresh_from_db() + assert report.rejected == expected + + assert len(mailoutbox) == mail_count + + if mail_count: + mail = mailoutbox[0] + assert mail.to[0] == user2.email if not report_owner else user + + +def test_report_update_rejected_owner( + internal_employee_client, report_factory, mailoutbox +): + user = internal_employee_client.user + report = report_factory.create(user=user, rejected=True) + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": { + "comment": "foobar", + "rejected": True, + }, + } + } + + url = reverse("report-detail", args=[report.id]) + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_200_OK + 
assert len(mailoutbox) == 0 + + +def test_report_reject_multiple_notify( + internal_employee_client, + task, + task_factory, + project, + report_factory, + user_factory, + mailoutbox, +): + reviewer = internal_employee_client.user + ProjectAssigneeFactory.create(user=reviewer, project=project, is_reviewer=True) + + user1, user2, user3 = user_factory.create_batch(3) + report1_1 = report_factory(user=user1, task=task) + report1_2 = report_factory(user=user1, task=task) + report2 = report_factory(user=user2, task=task) + report3 = report_factory(user=user3, task=task) + + url = reverse("report-bulk") + + data = { + "data": { + "type": "report-bulks", + "id": None, + "attributes": {"rejected": True}, + } + } + + query_params = ( + "?editable=1" + f"&reviewer={reviewer.id}" + "&id=" + ",".join(str(r.id) for r in [report1_1, report1_2, report2, report3]) + ) + response = internal_employee_client.post(url + query_params, data) + assert response.status_code == status.HTTP_204_NO_CONTENT + + for report in [report1_1, report1_2, report2, report3]: + report.refresh_from_db() + assert report.rejected + + # every user received one mail + assert len(mailoutbox) == 3 + assert all(True for mail in mailoutbox if len(mail.to) == 1) + assert set(mail.to[0] for mail in mailoutbox) == set( + user.email for user in [user1, user2, user3] + ) + + +def test_report_automatic_unreject(internal_employee_client, report_factory, task): + user = internal_employee_client.user + report = report_factory.create(user=user, rejected=True) + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": {"comment": "foo bar"}, + "relationships": { + "project": {"data": {"type": "projects", "id": task.project.id}}, + "task": {"data": {"type": "tasks", "id": task.id}}, + }, + } + } + + url = reverse("report-detail", args=[report.id]) + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_200_OK + + report.refresh_from_db() + assert not 
report.rejected + + +def test_report_bulk_automatic_unreject( + internal_employee_client, user_factory, report_factory, task +): + reviewer = internal_employee_client.user + + user = user_factory.create() + + report = report_factory.create(user=user, rejected=True) + ProjectAssigneeFactory.create( + user=reviewer, project=report.task.project, is_reviewer=True + ) + + url = reverse("report-bulk") + + data = { + "data": { + "type": "report-bulks", + "id": None, + "relationships": { + "task": {"data": {"type": "tasks", "id": task.id}}, + }, + } + } + + query_params = f"?editable=1&reviewer={reviewer.id}&id={report.id}" + response = internal_employee_client.post(url + query_params, data) + assert response.status_code == status.HTTP_204_NO_CONTENT + + report.refresh_from_db() + assert not report.rejected + + +@pytest.mark.parametrize( + "is_external, remaining_effort_active, is_superuser, expected", + [ + (True, True, False, status.HTTP_403_FORBIDDEN), + (True, False, False, status.HTTP_403_FORBIDDEN), + (False, True, False, status.HTTP_200_OK), + (False, False, False, status.HTTP_400_BAD_REQUEST), + (False, False, True, status.HTTP_400_BAD_REQUEST), + (False, True, True, status.HTTP_200_OK), + ], +) +def test_report_set_remaining_effort( + auth_client, + is_external, + remaining_effort_active, + expected, + is_superuser, + report_factory, +): + user = auth_client.user + EmploymentFactory.create(user=user, is_external=is_external) + report = report_factory.create(user=user) + + if remaining_effort_active: + report.task.project.remaining_effort_tracking = True + report.task.project.save() + + if is_superuser: + user.is_superuser = True + user.save() + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": { + "comment": "foo bar", + "remaining_effort": "01:00:00", + }, + } + } + + url = reverse("report-detail", args=[report.id]) + + response = auth_client.patch(url, data) + assert response.status_code == expected + + +@pytest.mark.parametrize( + 
"remaining_effort_active, expected", + [ + (True, status.HTTP_201_CREATED), + (False, status.HTTP_400_BAD_REQUEST), + ], +) +def test_report_create_remaining_effort( + internal_employee_client, + report_factory, + project_factory, + task_factory, + remaining_effort_active, + expected, +): + user = internal_employee_client.user + project = project_factory.create( + billed=True, remaining_effort_tracking=remaining_effort_active + ) + task = task_factory.create(project=project) + + data = { + "data": { + "type": "reports", + "id": None, + "attributes": { + "comment": "foo", + "duration": "00:15:00", + "date": "2022-02-01", + "remaining_effort": "01:00:00", + }, + "relationships": { + "task": {"data": {"type": "tasks", "id": task.id}}, + }, + } + } + + url = reverse("report-list") + + response = internal_employee_client.post(url, data) + assert response.status_code == expected + + if expected == status.HTTP_201_CREATED: + json = response.json() + assert json["data"]["relationships"]["user"]["data"]["id"] == str(user.id) + assert json["data"]["relationships"]["task"]["data"]["id"] == str(task.id) + + task.refresh_from_db() + assert task.most_recent_remaining_effort == timedelta(hours=1) + assert task.project.total_remaining_effort == timedelta(hours=1) + + +def test_report_remaining_effort_total( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + report = report_factory.create(user=user) + task_2 = TaskFactory.create(project=report.task.project) + report_2 = report_factory.create(user=user, task=task_2) + report.task.project.remaining_effort_tracking = True + report.task.project.save() + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": { + "remaining_effort": "01:00:00", + }, + } + } + + url = reverse("report-detail", args=[report.id]) + + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_200_OK + + report.task.refresh_from_db() + assert 
report.task.most_recent_remaining_effort == timedelta(hours=1) + assert report.task.project.total_remaining_effort == timedelta(hours=1) + + data = { + "data": { + "type": "reports", + "id": report_2.id, + "attributes": { + "remaining_effort": "03:00:00", + }, + } + } + + url = reverse("report-detail", args=[report_2.id]) + + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_200_OK + + task_2.refresh_from_db() + assert task_2.most_recent_remaining_effort == timedelta(hours=3) + assert task_2.project.total_remaining_effort == timedelta(hours=4) + + +def test_report_remaining_effort_update( + internal_employee_client, + report_factory, +): + user = internal_employee_client.user + report = report_factory.create(user=user, date="2022-02-01") + report_2 = report_factory.create(user=user, task=report.task, date="2022-02-01") + report.task.project.remaining_effort_tracking = True + report.task.project.save() + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": { + "remaining_effort": "01:00:00", + }, + } + } + + url = reverse("report-detail", args=[report.id]) + + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_200_OK + + report.task.refresh_from_db() + assert report.task.most_recent_remaining_effort == timedelta(hours=1) + assert report.task.project.total_remaining_effort == timedelta(hours=1) + + data = { + "data": { + "type": "reports", + "id": report_2.id, + "attributes": { + "remaining_effort": "03:00:00", + }, + } + } + + url = reverse("report-detail", args=[report_2.id]) + + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_200_OK + + report.task.refresh_from_db() + assert report.task.most_recent_remaining_effort == timedelta(hours=3) + assert report.task.project.total_remaining_effort == timedelta(hours=3) + + data = { + "data": { + "type": "reports", + "id": report.id, + "attributes": { + 
"comment": "foo bar", + }, + } + } + + url = reverse("report-detail", args=[report.id]) + + response = internal_employee_client.patch(url, data) + assert response.status_code == status.HTTP_200_OK + + report.task.refresh_from_db() + assert report.task.most_recent_remaining_effort == timedelta(hours=3) + assert report.task.project.total_remaining_effort == timedelta(hours=3) diff --git a/backend/timed/tracking/urls.py b/backend/timed/tracking/urls.py new file mode 100644 index 000000000..7eaeb2db2 --- /dev/null +++ b/backend/timed/tracking/urls.py @@ -0,0 +1,15 @@ +"""URL to view mapping for the tracking app.""" + +from django.conf import settings +from rest_framework.routers import SimpleRouter + +from timed.tracking import views + +r = SimpleRouter(trailing_slash=settings.APPEND_SLASH) + +r.register(r"activities", views.ActivityViewSet, "activity") +r.register(r"attendances", views.AttendanceViewSet, "attendance") +r.register(r"reports", views.ReportViewSet, "report") +r.register(r"absences", views.AbsenceViewSet, "absence") + +urlpatterns = r.urls diff --git a/backend/timed/tracking/views.py b/backend/timed/tracking/views.py new file mode 100644 index 000000000..d4e7ef74b --- /dev/null +++ b/backend/timed/tracking/views.py @@ -0,0 +1,397 @@ +"""Viewsets for the tracking app.""" + +from datetime import date + +import django_excel +from django.conf import settings +from django.db.models import Case, CharField, F, Q, Value, When +from django.http import HttpResponseBadRequest +from django.utils.translation import gettext_lazy as _ +from rest_framework import exceptions, status +from rest_framework.decorators import action +from rest_framework.response import Response +from rest_framework.viewsets import ModelViewSet + +from timed.employment.models import Employment, PublicHoliday +from timed.permissions import ( + IsAccountant, + IsAuthenticated, + IsExternal, + IsInternal, + IsNotDelete, + IsNotTransferred, + IsOwner, + IsReadOnly, + IsResource, + IsReviewer, + 
IsSuperUser, + IsSupervisor, + IsUnverified, +) +from timed.projects.models import CustomerAssignee, Task +from timed.serializers import AggregateObject +from timed.tracking import filters, models, serializers + +from . import tasks + + +class ActivityViewSet(ModelViewSet): + """Activity view set.""" + + serializer_class = serializers.ActivitySerializer + filterset_class = filters.ActivityFilterSet + permission_classes = [ + # users may not change transferred activities + IsAuthenticated & IsInternal & IsNotTransferred + | IsAuthenticated & IsReadOnly + # only external employees with resource role may create not transferred activities + | IsAuthenticated & IsExternal & IsResource & IsNotTransferred + ] + + def get_queryset(self): + """Filter the queryset by the user of the request. + + :return: The filtered activities + :rtype: QuerySet + """ + return models.Activity.objects.select_related( + "task", "user", "task__project", "task__project__customer" + ).filter(user=self.request.user) + + +class AttendanceViewSet(ModelViewSet): + """Attendance view set.""" + + serializer_class = serializers.AttendanceSerializer + filterset_class = filters.AttendanceFilterSet + permission_classes = [ + # superuser may edit all reports but not delete + IsSuperUser & IsNotDelete + # internal employees may change own attendances + | IsAuthenticated & IsInternal + # only external employees with resource role may change own attendances + | IsAuthenticated & IsExternal & IsResource + ] + + def get_queryset(self): + """Filter the queryset by the user of the request. 
+ + :return: The filtered attendances + :rtype: QuerySet + """ + return models.Attendance.objects.select_related("user").filter( + user=self.request.user + ) + + +class ReportViewSet(ModelViewSet): + """Report view set.""" + + serializer_class = serializers.ReportSerializer + filterset_class = filters.ReportFilterSet + queryset = models.Report.objects.select_related( + "task", "user", "task__project", "task__project__customer" + ) + permission_classes = [ + # superuser and accountants may edit all reports but not delete + (IsSuperUser | IsAccountant) & IsNotDelete + # reviewer and supervisor may change reports which aren't verfied but not delete them + | (IsReviewer | IsSupervisor) & IsUnverified & IsNotDelete + # internal employees may only change its own unverified reports + # only external employees with resource role may only change its own unverified reports + | IsOwner & IsUnverified & (IsInternal | (IsExternal & IsResource)) + # all authenticated users may read all reports + | IsAuthenticated & IsReadOnly + ] + ordering = ("date", "id") + ordering_fields = ( + "id", + "date", + "duration", + "task__project__customer__name", + "task__project__name", + "task__name", + "user__username", + "comment", + "verified_by__username", + "review", + "not_billable", + "rejected", + ) + + def get_queryset(self): + """Get filtered reports for external employees.""" + user = self.request.user + queryset = super().get_queryset() + queryset.select_related( + "task", "user", "task__project", "task__project__customer" + ) + + try: + current_employment = Employment.objects.get_at(user=user, date=date.today()) + if not current_employment.is_external: + return queryset + + assigned_tasks = Task.objects.filter( + Q(task_assignees__user=user, task_assignees__is_reviewer=True) + | Q( + project__project_assignees__user=user, + project__project_assignees__is_reviewer=True, + ) + | Q( + project__customer__customer_assignees__user=user, + 
project__customer__customer_assignees__is_reviewer=True, + ) + ) + queryset = queryset.filter(Q(task__in=assigned_tasks) | Q(user=user)) + return queryset + except Employment.DoesNotExist: + if CustomerAssignee.objects.filter(user=user, is_customer=True).exists(): + return queryset.filter( + Q( + task__project__customer__customer_assignees__user=user, + task__project__customer__customer_assignees__is_customer=True, + ) + ) + raise exceptions.PermissionDenied( + "User has no employment and isn't a customer!" + ) + + def update(self, request, *args, **kwargs): + """Override so we can issue emails on update.""" + + partial = kwargs.get("partial", False) + instance = self.get_object() + serializer = self.get_serializer(instance, data=request.data, partial=partial) + serializer.is_valid(raise_exception=True) + + if request.user != instance.user: + # send a notification only when the user is updating someone else's report + fields = { + key: value + for key, value in serializer.validated_data.items() + # value equal None means do not touch + if value is not None + } + if fields: + tasks.notify_user_changed_report(instance, fields, request.user) + if fields.get("rejected"): + tasks.notify_user_rejected_report(instance, request.user) + + return super().update(request, *args, **kwargs) + + @action( + detail=False, + methods=["get"], + serializer_class=serializers.ReportIntersectionSerializer, + ) + def intersection(self, request): + """ + Get intersection in reports of common report fields. + + Use case is for api caller to know what fields are the same + in a list of reports. This will be mainly used for bulk update. + + This will always return a single resource. 
+ """ + queryset = self.get_queryset() + queryset = self.filter_queryset(queryset) + + # filter params represent main indication of result + # so it can be used as id + params = self.request.query_params.copy() + ignore_params = {"ordering", "page", "page_size", "include"} + for param in ignore_params.intersection(params.keys()): + del params[param] + + data = AggregateObject(queryset=queryset, pk=params.urlencode()) + serializer = self.get_serializer(data) + return Response(data=serializer.data) + + @action( + detail=False, + methods=["post"], + # all users are allowed to bulk update but only on filtered result + permission_classes=[IsAuthenticated], + serializer_class=serializers.ReportBulkSerializer, + ) + def bulk(self, request): + user = request.user + queryset = self.get_queryset() + queryset = self.filter_queryset(queryset) + + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + + verified = serializer.validated_data.pop("verified", None) + fields = { + key: value + for key, value in serializer.validated_data.items() + # value equal None means do not touch + if value is not None + } + + editable = request.query_params.get("editable") + if not user.is_superuser and not editable: + raise exceptions.ParseError( + _("Editable filter needs to be set for bulk update") + ) + + if verified is not None: + # only reviewer or superuser may verify reports + # this is enforced when reviewer filter is set to current user + reviewer_id = request.query_params.get("reviewer") + if not user.is_superuser and str(reviewer_id) != str(user.id): + raise exceptions.ParseError( + _("Reviewer filter needs to be set to verifying user") + ) + + fields["verified_by"] = verified and user or None + + if ( + "review" in fields + and fields["review"] + or any(queryset.values_list("review", flat=True)) + ): + raise exceptions.ParseError( + _("Reports can't both be set as `review` and `verified`.") + ) + + if 
serializer.validated_data.get("billed", None) is not None and not ( + user.is_superuser or user.is_accountant + ): + raise exceptions.ParseError( + _("Only superuser and accountants may bill reports") + ) + + if "task" in fields: + # unreject report if task has changed + fields["rejected"] = False + if fields["task"].project.billed: + fields["billed"] = fields["task"].project.billed + + if fields: + # send notification if report was rejected + if fields.get("rejected"): + tasks.notify_user_rejected_reports(queryset, fields, user) + else: + tasks.notify_user_changed_reports(queryset, fields, user) + queryset.update(**fields) + + return Response(status=status.HTTP_204_NO_CONTENT) + + @action(methods=["get"], detail=False) + def export(self, request): + """Export filtered reports to given file format.""" + queryset = self.get_queryset().select_related( + "task__project__billing_type", + "task__cost_center", + "task__project__cost_center", + ) + queryset = self.filter_queryset(queryset) + queryset = queryset.annotate( + cost_center=Case( + # Task cost center has precedence over project cost center + When( + task__cost_center__isnull=False, then=F("task__cost_center__name") + ), + When( + task__project__cost_center__isnull=False, + then=F("task__project__cost_center__name"), + ), + default=Value(""), + output_field=CharField(), + ) + ) + queryset = queryset.annotate( + billing_type=Case( + When( + task__project__billing_type__isnull=False, + then=F("task__project__billing_type__name"), + ), + default=Value(""), + output_field=CharField(), + ) + ) + if ( + settings.REPORTS_EXPORT_MAX_COUNT > 0 + and queryset.count() > settings.REPORTS_EXPORT_MAX_COUNT + ): + return Response( + _( + "Your request exceeds the maximum allowed entries ({0} > {1})".format( + queryset.count(), settings.REPORTS_EXPORT_MAX_COUNT + ) + ), + status=status.HTTP_400_BAD_REQUEST, + ) + + colnames = [ + "Date", + "Duration", + "Customer", + "Project", + "Task", + "User", + "Comment", + "Billing Type", 
+ "Cost Center", + ] + + content = queryset.values_list( + "date", + "duration", + "task__project__customer__name", + "task__project__name", + "task__name", + "user__username", + "comment", + "billing_type", + "cost_center", + ) + + file_type = request.query_params.get("file_type") + if file_type not in ["csv", "xlsx", "ods"]: + return HttpResponseBadRequest() + + sheet = django_excel.pe.Sheet(content, name="Report", colnames=colnames) + return django_excel.make_response( + sheet, file_type=file_type, file_name="report.%s" % file_type + ) + + +class AbsenceViewSet(ModelViewSet): + """Absence view set.""" + + serializer_class = serializers.AbsenceSerializer + filterset_class = filters.AbsenceFilterSet + permission_classes = [ + # superuser can change all but not delete + IsAuthenticated & IsSuperUser & IsNotDelete + # owner may change all its absences + | IsAuthenticated & IsOwner & IsInternal + # all authenticated users may read filtered result + | IsAuthenticated & IsReadOnly + ] + + def get_queryset(self): + """Get absences only for internal employees. + + User should be able to create an absence on a public holiday if the + public holiday is only on user's previous employment location. 
+ """ + user = self.request.user + if user.is_superuser: + queryset = models.Absence.objects.select_related("absence_type", "user") + return queryset + + queryset = ( + models.Absence.objects.select_related("absence_type", "user") + .filter(Q(user=user) | Q(user__in=user.supervisees.all())) + .exclude( + date__in=PublicHoliday.objects.filter( + location=user.get_active_employment().location + ).values("date") + ) + ) + return queryset diff --git a/backend/timed/urls.py b/backend/timed/urls.py new file mode 100644 index 000000000..1a236034a --- /dev/null +++ b/backend/timed/urls.py @@ -0,0 +1,15 @@ +"""Root URL mapping.""" + +from django.contrib import admin +from django.urls import include, re_path + +urlpatterns = [ + re_path(r"^admin/", admin.site.urls), + re_path(r"^api/v1/", include("timed.employment.urls")), + re_path(r"^api/v1/", include("timed.projects.urls")), + re_path(r"^api/v1/", include("timed.tracking.urls")), + re_path(r"^api/v1/", include("timed.reports.urls")), + re_path(r"^api/v1/", include("timed.subscription.urls")), + re_path(r"^oidc/", include("mozilla_django_oidc.urls")), + re_path(r"^prometheus/", include("django_prometheus.urls")), +] diff --git a/backend/timed/wsgi.py b/backend/timed/wsgi.py new file mode 100644 index 000000000..7e15165e2 --- /dev/null +++ b/backend/timed/wsgi.py @@ -0,0 +1,16 @@ +""" +WSGI config for timed project. + +It exposes the WSGI callable as a module-level variable named ``application``. 
+ +For more information on this file, see +https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/ +""" + +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "timed.settings") + +application = get_wsgi_application() diff --git a/dev-config/keycloak-config.json b/dev-config/keycloak-config.json new file mode 100644 index 000000000..f31c94fc1 --- /dev/null +++ b/dev-config/keycloak-config.json @@ -0,0 +1,1802 @@ +{ + "id" : "timed", + "realm" : "timed", + "notBefore" : 0, + "revokeRefreshToken" : false, + "refreshTokenMaxReuse" : 0, + "accessTokenLifespan" : 300, + "accessTokenLifespanForImplicitFlow" : 900, + "ssoSessionIdleTimeout" : 1800, + "ssoSessionMaxLifespan" : 36000, + "ssoSessionIdleTimeoutRememberMe" : 0, + "ssoSessionMaxLifespanRememberMe" : 0, + "offlineSessionIdleTimeout" : 2592000, + "offlineSessionMaxLifespanEnabled" : false, + "offlineSessionMaxLifespan" : 5184000, + "clientSessionIdleTimeout" : 0, + "clientSessionMaxLifespan" : 0, + "accessCodeLifespan" : 60, + "accessCodeLifespanUserAction" : 300, + "accessCodeLifespanLogin" : 1800, + "actionTokenGeneratedByAdminLifespan" : 43200, + "actionTokenGeneratedByUserLifespan" : 300, + "enabled" : true, + "sslRequired" : "external", + "registrationAllowed" : false, + "registrationEmailAsUsername" : false, + "rememberMe" : false, + "verifyEmail" : false, + "loginWithEmailAllowed" : true, + "duplicateEmailsAllowed" : false, + "resetPasswordAllowed" : false, + "editUsernameAllowed" : false, + "bruteForceProtected" : false, + "permanentLockout" : false, + "maxFailureWaitSeconds" : 900, + "minimumQuickLoginWaitSeconds" : 60, + "waitIncrementSeconds" : 60, + "quickLoginCheckMilliSeconds" : 1000, + "maxDeltaTimeSeconds" : 43200, + "failureFactor" : 30, + "roles" : { + "realm" : [ { + "id" : "9ff0d967-aa79-4bf2-8a90-7bd89f66d73c", + "name" : "offline_access", + "description" : "${role_offline-access}", + "composite" : false, + "clientRole" : 
false, + "containerId" : "timed", + "attributes" : { } + }, { + "id" : "d99bb30b-f4d2-48f4-a053-706e42c4f7ee", + "name" : "uma_authorization", + "description" : "${role_uma_authorization}", + "composite" : false, + "clientRole" : false, + "containerId" : "timed", + "attributes" : { } + }, { + "id" : "40520b35-6d35-476c-bf26-94dbada179bb", + "name" : "admin", + "composite" : false, + "clientRole" : false, + "containerId" : "timed", + "attributes" : { } + } ], + "client" : { + "realm-management" : [ { + "id" : "57a2fe69-be5f-444f-b65e-6c7f03f8d869", + "name" : "realm-admin", + "description" : "${role_realm-admin}", + "composite" : true, + "composites" : { + "client" : { + "realm-management" : [ "create-client", "query-realms", "manage-events", "view-authorization", "manage-authorization", "view-identity-providers", "query-groups", "view-clients", "manage-identity-providers", "view-realm", "query-clients", "query-users", "manage-clients", "view-events", "manage-users", "view-users", "impersonation", "manage-realm" ] + } + }, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "da8f521e-a54b-4870-802c-ea03dc9912b1", + "name" : "create-client", + "description" : "${role_create-client}", + "composite" : false, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "b631d90e-1240-45d7-9b80-2e56c476d682", + "name" : "query-realms", + "description" : "${role_query-realms}", + "composite" : false, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "d718e12e-eb6b-4876-be82-46d8404955cb", + "name" : "manage-events", + "description" : "${role_manage-events}", + "composite" : false, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "a0685af3-f0b8-4cca-ba68-43ac45f4d9a4", + "name" : "view-authorization", + "description" 
: "${role_view-authorization}", + "composite" : false, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "885d9844-528e-4ce5-a3e4-97d772c21f83", + "name" : "manage-authorization", + "description" : "${role_manage-authorization}", + "composite" : false, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "6df9ee52-f586-4464-b088-a7a9c1f5728f", + "name" : "view-identity-providers", + "description" : "${role_view-identity-providers}", + "composite" : false, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "f0c7d896-ea82-41f6-920d-6b3796e4c749", + "name" : "query-groups", + "description" : "${role_query-groups}", + "composite" : false, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "a5670fbb-438d-49b1-b27d-9ddcfe429bea", + "name" : "view-clients", + "description" : "${role_view-clients}", + "composite" : true, + "composites" : { + "client" : { + "realm-management" : [ "query-clients" ] + } + }, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "1b54aaa2-6b4d-4743-8967-af332be88f3c", + "name" : "manage-identity-providers", + "description" : "${role_manage-identity-providers}", + "composite" : false, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "67b6cc9f-8496-40d0-a20a-05a3956247dc", + "name" : "view-realm", + "description" : "${role_view-realm}", + "composite" : false, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "a88cfadc-008c-4be0-ab43-d41e86a477bd", + "name" : "query-clients", + "description" : "${role_query-clients}", + "composite" : false, + "clientRole" : true, + "containerId" : 
"ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "3eceb96d-7c30-44c5-b24b-f187f2354a81", + "name" : "query-users", + "description" : "${role_query-users}", + "composite" : false, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "ee0d161a-ca6b-41c0-bc5f-649241374909", + "name" : "manage-clients", + "description" : "${role_manage-clients}", + "composite" : false, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "1f37d868-c309-4e66-9172-4f79c4717cfa", + "name" : "view-events", + "description" : "${role_view-events}", + "composite" : false, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "3bb25834-99d1-42da-a548-e1c2ac345f55", + "name" : "manage-users", + "description" : "${role_manage-users}", + "composite" : false, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "eec090c0-32de-4af5-a30c-6caddc24f234", + "name" : "view-users", + "description" : "${role_view-users}", + "composite" : true, + "composites" : { + "client" : { + "realm-management" : [ "query-users", "query-groups" ] + } + }, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "978bf5c9-f072-483f-a812-1ac7435c32a3", + "name" : "impersonation", + "description" : "${role_impersonation}", + "composite" : false, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + }, { + "id" : "7bde3002-188f-4f9e-a2ae-791594539429", + "name" : "manage-realm", + "description" : "${role_manage-realm}", + "composite" : false, + "clientRole" : true, + "containerId" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "attributes" : { } + } ], + "timed-public" : [ ], + "security-admin-console" : [ ], + "admin-cli" : [ ], + 
"account-console" : [ ], + "broker" : [ { + "id" : "9d18b1f6-81a4-45e8-b80d-a7f413435e35", + "name" : "read-token", + "description" : "${role_read-token}", + "composite" : false, + "clientRole" : true, + "containerId" : "1061a27d-9138-4b16-8d34-5aca38a99881", + "attributes" : { } + } ], + "account" : [ { + "id" : "2e91b70b-71d0-43a4-aff2-d511ecbe336b", + "name" : "manage-consent", + "description" : "${role_manage-consent}", + "composite" : true, + "composites" : { + "client" : { + "account" : [ "view-consent" ] + } + }, + "clientRole" : true, + "containerId" : "fca38486-0dca-4d9f-aebc-d219d641e179", + "attributes" : { } + }, { + "id" : "d4cb3e12-deb9-4d08-998b-80d81d59e32a", + "name" : "view-consent", + "description" : "${role_view-consent}", + "composite" : false, + "clientRole" : true, + "containerId" : "fca38486-0dca-4d9f-aebc-d219d641e179", + "attributes" : { } + }, { + "id" : "ed5043f6-5896-44c8-b5d1-604f3963dde4", + "name" : "view-profile", + "description" : "${role_view-profile}", + "composite" : false, + "clientRole" : true, + "containerId" : "fca38486-0dca-4d9f-aebc-d219d641e179", + "attributes" : { } + }, { + "id" : "4f96be37-f919-49ee-82c5-5bd5b8b45216", + "name" : "view-applications", + "description" : "${role_view-applications}", + "composite" : false, + "clientRole" : true, + "containerId" : "fca38486-0dca-4d9f-aebc-d219d641e179", + "attributes" : { } + }, { + "id" : "a66d557e-5e7b-40c2-9b42-9c92c6f7994a", + "name" : "manage-account", + "description" : "${role_manage-account}", + "composite" : true, + "composites" : { + "client" : { + "account" : [ "manage-account-links" ] + } + }, + "clientRole" : true, + "containerId" : "fca38486-0dca-4d9f-aebc-d219d641e179", + "attributes" : { } + }, { + "id" : "285e9a4b-523c-4455-b59c-8d599f5c0d77", + "name" : "manage-account-links", + "description" : "${role_manage-account-links}", + "composite" : false, + "clientRole" : true, + "containerId" : "fca38486-0dca-4d9f-aebc-d219d641e179", + "attributes" : { } + } ] + 
} + }, + "groups" : [ { + "id" : "cfd54499-a335-48e7-bb1e-1d3a6a063d95", + "name" : "access-cc", + "path" : "/access-cc", + "attributes" : { }, + "realmRoles" : [ ], + "clientRoles" : { }, + "subGroups" : [ ] + }, { + "id" : "f9cc25c4-04bb-4c95-aa07-fd918534d2fd", + "name" : "adfinis-users", + "path" : "/adfinis-users", + "attributes" : { }, + "realmRoles" : [ ], + "clientRoles" : { }, + "subGroups" : [ ] + }, { + "id" : "b3cf5ffb-e50f-4a3d-ad4a-8003aa8fd8d0", + "name" : "cc-admin", + "path" : "/cc-admin", + "attributes" : { }, + "realmRoles" : [ ], + "clientRoles" : { }, + "subGroups" : [ ] + } ], + "defaultRoles" : [ "offline_access", "uma_authorization" ], + "requiredCredentials" : [ "password" ], + "otpPolicyType" : "totp", + "otpPolicyAlgorithm" : "HmacSHA1", + "otpPolicyInitialCounter" : 0, + "otpPolicyDigits" : 6, + "otpPolicyLookAheadWindow" : 1, + "otpPolicyPeriod" : 30, + "otpSupportedApplications" : [ "FreeOTP", "Google Authenticator" ], + "webAuthnPolicyRpEntityName" : "keycloak", + "webAuthnPolicySignatureAlgorithms" : [ "ES256" ], + "webAuthnPolicyRpId" : "", + "webAuthnPolicyAttestationConveyancePreference" : "not specified", + "webAuthnPolicyAuthenticatorAttachment" : "not specified", + "webAuthnPolicyRequireResidentKey" : "not specified", + "webAuthnPolicyUserVerificationRequirement" : "not specified", + "webAuthnPolicyCreateTimeout" : 0, + "webAuthnPolicyAvoidSameAuthenticatorRegister" : false, + "webAuthnPolicyAcceptableAaguids" : [ ], + "webAuthnPolicyPasswordlessRpEntityName" : "keycloak", + "webAuthnPolicyPasswordlessSignatureAlgorithms" : [ "ES256" ], + "webAuthnPolicyPasswordlessRpId" : "", + "webAuthnPolicyPasswordlessAttestationConveyancePreference" : "not specified", + "webAuthnPolicyPasswordlessAuthenticatorAttachment" : "not specified", + "webAuthnPolicyPasswordlessRequireResidentKey" : "not specified", + "webAuthnPolicyPasswordlessUserVerificationRequirement" : "not specified", + "webAuthnPolicyPasswordlessCreateTimeout" : 0, + 
"webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister" : false, + "webAuthnPolicyPasswordlessAcceptableAaguids" : [ ], + "users" : [ { + "id" : "e809412f-1920-45f4-92e1-dfcb72d47d3c", + "createdTimestamp" : 1606226264598, + "username" : "admin", + "enabled" : true, + "totp" : false, + "emailVerified" : true, + "firstName" : "Admin", + "lastName" : "Strator", + "credentials" : [ { + "id" : "42fa428a-0ca4-4951-9a4e-cb632f8e3367", + "type" : "password", + "createdDate" : 1606226276012, + "secretData" : "{\"value\":\"tXOrES4hRfM6P6v3/l3ALU4fvd+ATxb/710U5C90xyMFKPxp3KGl8iY9WS9JG43JjmNoLq7MVyj2sKISi07sgg==\",\"salt\":\"s/muBwPqjaauAXOLhGbq6w==\"}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\"}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "offline_access", "uma_authorization", "admin" ], + "clientRoles" : { + "account" : [ "view-profile", "manage-account" ] + }, + "notBefore" : 0, + "groups" : [ "/cc-admin" ] + }, { + "id" : "f51ce893-a8a0-444c-aa4e-b09f4e8df4dc", + "createdTimestamp" : 1606226312625, + "username" : "axels", + "enabled" : true, + "totp" : false, + "emailVerified" : true, + "firstName" : "Axel", + "lastName" : "Schöni", + "credentials" : [ { + "id" : "ded43644-9163-4a1f-944f-0eb925896507", + "type" : "password", + "createdDate" : 1606226316611, + "secretData" : "{\"value\":\"v2TV5f4ahxtKYZVcPdtXrC3wHIGnK5ULql2kTeZGPB6vMScHrhYCGMFO+RdP1X8cJ+0gu6G4L39Uvfy2rHvmXg==\",\"salt\":\"1fc09zAbeQuUbsz3BYXsOw==\"}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\"}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "offline_access", "uma_authorization" ], + "clientRoles" : { + "account" : [ "view-profile", "manage-account" ] + }, + "notBefore" : 0, + "groups" : [ "/adfinis-users" ] + }, { + "id" : "dfabf742-0eff-4699-8d59-311d96afe7a7", + "createdTimestamp" : 1606226293084, + "username" : "fritzm", + "enabled" : 
true, + "totp" : false, + "emailVerified" : true, + "firstName" : "Fritz", + "lastName" : "Muster", + "credentials" : [ { + "id" : "a4822286-3490-497e-942b-96afbfc52fee", + "type" : "password", + "createdDate" : 1606226300256, + "secretData" : "{\"value\":\"5jgIFyz3NkHrD7CKYzuskE7IGMZoGiZID0KFDi89BzdEAaxq/OO0vSoq27SbXFutZ+LW14F950Yn/SrV830DYA==\",\"salt\":\"UQqh7+SAI4qX08CE1BsjPQ==\"}", + "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\"}" + } ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "offline_access", "uma_authorization" ], + "clientRoles" : { + "account" : [ "view-profile", "manage-account" ] + }, + "notBefore" : 0, + "groups" : [ "/adfinis-users" ] + }, { + "id" : "fddcc94c-011c-47d8-9b01-0f7e651e2f51", + "createdTimestamp" : 1639071866317, + "username" : "wladimirc", + "enabled" : true, + "totp" : false, + "emailVerified" : false, + "firstName" : "Wladimir", + "lastName" : "Customer", + "credentials" : [ ], + "disableableCredentialTypes" : [ ], + "requiredActions" : [ ], + "realmRoles" : [ "offline_access", "uma_authorization" ], + "clientRoles" : { + "account" : [ "view-profile", "manage-account" ] + }, + "notBefore" : 0, + "groups" : [ "/access-cc" ] + } ], + "scopeMappings" : [ { + "clientScope" : "offline_access", + "roles" : [ "offline_access" ] + } ], + "clientScopeMappings" : { + "account" : [ { + "client" : "account-console", + "roles" : [ "manage-account" ] + } ] + }, + "clients" : [ { + "id" : "fca38486-0dca-4d9f-aebc-d219d641e179", + "clientId" : "account", + "name" : "${client_account}", + "rootUrl" : "${authBaseUrl}", + "baseUrl" : "/realms/timed/account/", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "secret" : "731474de-9346-457a-a514-888887f78683", + "defaultRoles" : [ "view-profile", "manage-account" ], + "redirectUris" : [ "/realms/timed/account/*" ], + "webOrigins" : [ ], + 
"notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : false, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "defaultClientScopes" : [ "web-origins", "role_list", "profile", "roles", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "0953aa7e-fac7-43c9-aba9-83ce83d91c7c", + "clientId" : "account-console", + "name" : "${client_account-console}", + "rootUrl" : "${authBaseUrl}", + "baseUrl" : "/realms/timed/account/", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "secret" : "ec538ba5-bdd1-4f84-918d-90fc5e89874c", + "redirectUris" : [ "/realms/timed/account/*" ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : true, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { + "pkce.code.challenge.method" : "S256" + }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "protocolMappers" : [ { + "id" : "2cd22dff-9478-416a-99fe-b2b70d18ca72", + "name" : "audience resolve", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-audience-resolve-mapper", + "consentRequired" : false, + "config" : { } + } ], + "defaultClientScopes" : [ "web-origins", "role_list", "profile", "roles", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : 
"63a6baba-3d47-4308-af73-7c763fd31cfd", + "clientId" : "admin-cli", + "name" : "${client_admin-cli}", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "secret" : "97bb38b7-d47c-4d37-88c2-7fed912b1f8b", + "redirectUris" : [ ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : false, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : true, + "serviceAccountsEnabled" : false, + "publicClient" : true, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "defaultClientScopes" : [ "web-origins", "role_list", "profile", "roles", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "1061a27d-9138-4b16-8d34-5aca38a99881", + "clientId" : "broker", + "name" : "${client_broker}", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "secret" : "66d4120f-76dc-49fe-a958-7983be2aeee8", + "redirectUris" : [ ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : false, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "defaultClientScopes" : [ "web-origins", "role_list", "profile", "roles", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "ea8482c9-4974-4761-afb0-c092d89b5a0e", + "clientId" : "realm-management", 
+ "name" : "${client_realm-management}", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "secret" : "e4205e45-7122-4cb5-a3fd-c63cc58ee0a1", + "redirectUris" : [ ], + "webOrigins" : [ ], + "notBefore" : 0, + "bearerOnly" : true, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : false, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "defaultClientScopes" : [ "web-origins", "role_list", "profile", "roles", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "aa1b6e9c-92fe-426b-9eb0-23041d027c2d", + "clientId" : "security-admin-console", + "name" : "${client_security-admin-console}", + "rootUrl" : "${authAdminUrl}", + "baseUrl" : "/admin/timed/console/", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "secret" : "99ae8d54-620c-4bfb-9447-62dbbce5786b", + "redirectUris" : [ "/admin/timed/console/*" ], + "webOrigins" : [ "+" ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : true, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { + "pkce.code.challenge.method" : "S256" + }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : false, + "nodeReRegistrationTimeout" : 0, + "protocolMappers" : [ { + "id" : "09f41dd1-e9f6-44eb-b847-0532ea9ea522", + "name" : "locale", + "protocol" : "openid-connect", + "protocolMapper" : 
"oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "locale", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "locale", + "jsonType.label" : "String" + } + } ], + "defaultClientScopes" : [ "web-origins", "role_list", "profile", "roles", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + }, { + "id" : "30472c69-7906-44be-acbc-619d1cb7d183", + "clientId" : "timed-public", + "rootUrl" : "http://timed.local", + "adminUrl" : "http://timed.local", + "surrogateAuthRequired" : false, + "enabled" : true, + "alwaysDisplayInConsole" : false, + "clientAuthenticatorType" : "client-secret", + "secret" : "bde8e0d9-c4f8-4ab6-b1db-4724b85e8db0", + "redirectUris" : [ "http://timed.local/*", "http://localhost/*" ], + "webOrigins" : [ "*" ], + "notBefore" : 0, + "bearerOnly" : false, + "consentRequired" : false, + "standardFlowEnabled" : true, + "implicitFlowEnabled" : false, + "directAccessGrantsEnabled" : false, + "serviceAccountsEnabled" : false, + "publicClient" : true, + "frontchannelLogout" : false, + "protocol" : "openid-connect", + "attributes" : { + "saml.assertion.signature" : "false", + "saml.force.post.binding" : "false", + "saml.multivalued.roles" : "false", + "saml.encrypt" : "false", + "saml.server.signature" : "false", + "saml.server.signature.keyinfo.ext" : "false", + "exclude.session.state.from.auth.response" : "false", + "saml_force_name_id_format" : "false", + "saml.client.signature" : "false", + "tls.client.certificate.bound.access.tokens" : "false", + "saml.authnstatement" : "false", + "display.on.consent.screen" : "false", + "saml.onetimeuse.condition" : "false" + }, + "authenticationFlowBindingOverrides" : { }, + "fullScopeAllowed" : true, + "nodeReRegistrationTimeout" : -1, + "protocolMappers" : [ { + "id" : "72b658f0-da69-4e56-bf2a-7813e570aba8", + "name" : "Groups", + "protocol" : 
"openid-connect", + "protocolMapper" : "oidc-group-membership-mapper", + "consentRequired" : false, + "config" : { + "full.path" : "true", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "groups", + "userinfo.token.claim" : "true" + } + } ], + "defaultClientScopes" : [ "web-origins", "role_list", "profile", "roles", "email" ], + "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ] + } ], + "clientScopes" : [ { + "id" : "430142db-2fac-4c47-a4d1-0893d0918da4", + "name" : "address", + "description" : "OpenID Connect built-in scope: address", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${addressScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "b964306d-914a-4442-8445-f3cc43bf4ef4", + "name" : "address", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-address-mapper", + "consentRequired" : false, + "config" : { + "user.attribute.formatted" : "formatted", + "user.attribute.country" : "country", + "user.attribute.postal_code" : "postal_code", + "userinfo.token.claim" : "true", + "user.attribute.street" : "street", + "id.token.claim" : "true", + "user.attribute.region" : "region", + "access.token.claim" : "true", + "user.attribute.locality" : "locality" + } + } ] + }, { + "id" : "b86d691d-c6b7-45b2-993f-c4f3c6ed9f21", + "name" : "email", + "description" : "OpenID Connect built-in scope: email", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${emailScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "caf47f60-07c5-4975-a2e2-709aa6aee27e", + "name" : "email verified", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : 
"emailVerified", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "email_verified", + "jsonType.label" : "boolean" + } + }, { + "id" : "6521805d-46b1-4fb9-988b-ae4d13e1b8e5", + "name" : "email", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "email", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "email", + "jsonType.label" : "String" + } + } ] + }, { + "id" : "a28910e0-e6b4-4d2b-9fa0-bb6b2c6ea78a", + "name" : "microprofile-jwt", + "description" : "Microprofile - JWT built-in scope", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "false" + }, + "protocolMappers" : [ { + "id" : "d0b1f606-d23e-4762-9d29-24a1f060c879", + "name" : "groups", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-realm-role-mapper", + "consentRequired" : false, + "config" : { + "multivalued" : "true", + "userinfo.token.claim" : "true", + "user.attribute" : "foo", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "groups", + "jsonType.label" : "String" + } + }, { + "id" : "349b94d1-e9d9-464e-93da-fdc6c715037f", + "name" : "upn", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "username", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "upn", + "jsonType.label" : "String" + } + } ] + }, { + "id" : "60b03eb6-b050-4454-a337-f8928575ee7d", + "name" : "offline_access", + "description" : "OpenID Connect built-in scope: offline_access", + "protocol" : "openid-connect", + "attributes" : { + "consent.screen.text" : "${offlineAccessScopeConsentText}", + "display.on.consent.screen" : "true" + } + }, { + "id" : 
"fca7028e-0e94-4c09-988e-e661d46482b9", + "name" : "phone", + "description" : "OpenID Connect built-in scope: phone", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${phoneScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "a1feecd1-a101-444a-b8d1-c4a459e7ea6d", + "name" : "phone number", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "phoneNumber", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "phone_number", + "jsonType.label" : "String" + } + }, { + "id" : "301418c7-ab4e-4a8b-a13f-331e1ab094f3", + "name" : "phone number verified", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "phoneNumberVerified", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "phone_number_verified", + "jsonType.label" : "boolean" + } + } ] + }, { + "id" : "d8c1471f-1e13-4e37-9639-40257ca784cc", + "name" : "profile", + "description" : "OpenID Connect built-in scope: profile", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "true", + "display.on.consent.screen" : "true", + "consent.screen.text" : "${profileScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "8e2378db-ebe8-4141-af99-a7d8cbfb31bd", + "name" : "username", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "username", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "preferred_username", + "jsonType.label" : "String" + } + }, { + "id" : 
"9aea5061-7e49-4a9f-9dd6-b0ec6aa31a33", + "name" : "nickname", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "nickname", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "nickname", + "jsonType.label" : "String" + } + }, { + "id" : "63233679-5038-434f-9cf1-1b4cac25eb9d", + "name" : "family name", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "lastName", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "family_name", + "jsonType.label" : "String" + } + }, { + "id" : "0e4034e3-4bf8-45a6-9b03-5fe58406a235", + "name" : "middle name", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "middleName", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "middle_name", + "jsonType.label" : "String" + } + }, { + "id" : "db51ecec-798e-4407-a750-2eba3c74bf5c", + "name" : "gender", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "gender", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "gender", + "jsonType.label" : "String" + } + }, { + "id" : "f66647a8-cc03-4936-a4c8-a2d43f2fb605", + "name" : "locale", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "locale", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "locale", + "jsonType.label" : 
"String" + } + }, { + "id" : "a581ee00-615b-4ae2-9123-14889580a95b", + "name" : "profile", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "profile", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "profile", + "jsonType.label" : "String" + } + }, { + "id" : "30bec6a9-f8c3-4a50-9a1b-d07cae3df8d8", + "name" : "picture", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "picture", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "picture", + "jsonType.label" : "String" + } + }, { + "id" : "16ac837d-ff47-4052-9a68-8ed6b399ec2b", + "name" : "birthdate", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "birthdate", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "birthdate", + "jsonType.label" : "String" + } + }, { + "id" : "82c27e74-153a-47b2-8148-71af806b1f99", + "name" : "updated at", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "updatedAt", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "updated_at", + "jsonType.label" : "String" + } + }, { + "id" : "83701e99-d46b-4ddb-9e87-696d41a97984", + "name" : "website", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "website", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : 
"website", + "jsonType.label" : "String" + } + }, { + "id" : "69977a6d-5238-4385-a922-0c1b99f1c15a", + "name" : "zoneinfo", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-attribute-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "zoneinfo", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "zoneinfo", + "jsonType.label" : "String" + } + }, { + "id" : "aa8e440f-7812-408f-b55c-a154337aae2d", + "name" : "full name", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-full-name-mapper", + "consentRequired" : false, + "config" : { + "id.token.claim" : "true", + "access.token.claim" : "true", + "userinfo.token.claim" : "true" + } + }, { + "id" : "84c655de-34d7-4d58-a63c-9c78475da136", + "name" : "given name", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-property-mapper", + "consentRequired" : false, + "config" : { + "userinfo.token.claim" : "true", + "user.attribute" : "firstName", + "id.token.claim" : "true", + "access.token.claim" : "true", + "claim.name" : "given_name", + "jsonType.label" : "String" + } + } ] + }, { + "id" : "8e350b44-837b-42f1-9cf2-445f282ea9da", + "name" : "role_list", + "description" : "SAML role list", + "protocol" : "saml", + "attributes" : { + "consent.screen.text" : "${samlRoleListScopeConsentText}", + "display.on.consent.screen" : "true" + }, + "protocolMappers" : [ { + "id" : "78f6f58f-4cd6-4635-ba1c-e8f7c9ec29f1", + "name" : "role list", + "protocol" : "saml", + "protocolMapper" : "saml-role-list-mapper", + "consentRequired" : false, + "config" : { + "single" : "false", + "attribute.nameformat" : "Basic", + "attribute.name" : "Role" + } + } ] + }, { + "id" : "93d7c992-6651-4300-9b85-832e8929bce4", + "name" : "roles", + "description" : "OpenID Connect scope for add user roles to the access token", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "false", + 
"display.on.consent.screen" : "true", + "consent.screen.text" : "${rolesScopeConsentText}" + }, + "protocolMappers" : [ { + "id" : "e7134f4a-d194-4dfc-bf3e-40de3b3bf087", + "name" : "client roles", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-client-role-mapper", + "consentRequired" : false, + "config" : { + "user.attribute" : "foo", + "access.token.claim" : "true", + "claim.name" : "resource_access.${client_id}.roles", + "jsonType.label" : "String", + "multivalued" : "true" + } + }, { + "id" : "936536a3-9b53-44a5-ad90-1e82871c93e8", + "name" : "audience resolve", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-audience-resolve-mapper", + "consentRequired" : false, + "config" : { } + }, { + "id" : "275e146a-73cc-4930-a752-2bd18764f65b", + "name" : "realm roles", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-usermodel-realm-role-mapper", + "consentRequired" : false, + "config" : { + "user.attribute" : "foo", + "access.token.claim" : "true", + "claim.name" : "realm_access.roles", + "jsonType.label" : "String", + "multivalued" : "true" + } + } ] + }, { + "id" : "17745df8-9dda-4ded-b2e1-10c10892a341", + "name" : "web-origins", + "description" : "OpenID Connect scope for add allowed web origins to the access token", + "protocol" : "openid-connect", + "attributes" : { + "include.in.token.scope" : "false", + "display.on.consent.screen" : "false", + "consent.screen.text" : "" + }, + "protocolMappers" : [ { + "id" : "15a58940-f2e8-4032-bdf0-f5d586fa86e1", + "name" : "allowed web origins", + "protocol" : "openid-connect", + "protocolMapper" : "oidc-allowed-origins-mapper", + "consentRequired" : false, + "config" : { } + } ] + } ], + "defaultDefaultClientScopes" : [ "role_list", "profile", "email", "roles", "web-origins" ], + "defaultOptionalClientScopes" : [ "offline_access", "address", "phone", "microprofile-jwt" ], + "browserSecurityHeaders" : { + "contentSecurityPolicyReportOnly" : "", + "xContentTypeOptions" : "nosniff", 
+ "xRobotsTag" : "none", + "xFrameOptions" : "SAMEORIGIN", + "contentSecurityPolicy" : "frame-src 'self'; frame-ancestors 'self'; object-src 'none';", + "xXSSProtection" : "1; mode=block", + "strictTransportSecurity" : "max-age=31536000; includeSubDomains" + }, + "smtpServer" : { }, + "eventsEnabled" : false, + "eventsListeners" : [ "jboss-logging" ], + "enabledEventTypes" : [ ], + "adminEventsEnabled" : false, + "adminEventsDetailsEnabled" : false, + "components" : { + "org.keycloak.services.clientregistration.policy.ClientRegistrationPolicy" : [ { + "id" : "0c68916d-29e5-4bc3-8f34-66d3e074d0ba", + "name" : "Trusted Hosts", + "providerId" : "trusted-hosts", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { + "host-sending-registration-request-must-match" : [ "true" ], + "client-uris-must-match" : [ "true" ] + } + }, { + "id" : "01017e9d-ccee-4682-a43d-dedc779456df", + "name" : "Allowed Protocol Mapper Types", + "providerId" : "allowed-protocol-mappers", + "subType" : "authenticated", + "subComponents" : { }, + "config" : { + "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "oidc-address-mapper" ] + } + }, { + "id" : "04a61e10-fc46-4e0c-ad0a-3eec163d6e24", + "name" : "Allowed Client Scopes", + "providerId" : "allowed-client-templates", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { + "allow-default-scopes" : [ "true" ] + } + }, { + "id" : "aae5ad0e-f00b-4ebf-8b7f-dac3e020d5d5", + "name" : "Allowed Client Scopes", + "providerId" : "allowed-client-templates", + "subType" : "authenticated", + "subComponents" : { }, + "config" : { + "allow-default-scopes" : [ "true" ] + } + }, { + "id" : "cae2d534-9164-4760-a10d-19cfdf36ac0d", + "name" : "Consent Required", + "providerId" : "consent-required", + "subType" : "anonymous", + 
"subComponents" : { }, + "config" : { } + }, { + "id" : "cb3d8398-9fda-423a-bc1a-9504ead9d10f", + "name" : "Allowed Protocol Mapper Types", + "providerId" : "allowed-protocol-mappers", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { + "allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-address-mapper" ] + } + }, { + "id" : "65750361-be2e-41cc-ae63-1abdda285720", + "name" : "Full Scope Disabled", + "providerId" : "scope", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { } + }, { + "id" : "f1669b22-e29d-4eda-97b3-f4ab3ec57673", + "name" : "Max Clients Limit", + "providerId" : "max-clients", + "subType" : "anonymous", + "subComponents" : { }, + "config" : { + "max-clients" : [ "200" ] + } + } ], + "org.keycloak.keys.KeyProvider" : [ { + "id" : "ffd05084-ba7b-4d5f-bec3-9a2c9cabb7eb", + "name" : "rsa-generated", + "providerId" : "rsa-generated", + "subComponents" : { }, + "config" : { + "privateKey" : [ 
"MIIEowIBAAKCAQEAmKi1AOCgn2rmzdroi54yETw7vo7sVFoQsKYf3L68TgLMjHxsjZLj7o6aVi9wv3rZ0krm7JBD3nxcGSPyEkBGYwaGzO0qxSnjkEfqRFxOODjWcQglcfycY3JyQE8Uz2bJ45BQ3lebviP6AEF3RW0xmuiQ6BKr26sK8iRa7ivZH2SbLsKMVlR/7kCHNGnCnCqWXuCmMMItGT0eqclbnT0BBKBN6aTou7Nh95Uj3Iwh9EfPUIQjnrRK9YOCdh3mjvuadojiPWUvRkVOOGg9yBU32J/WKJUbO4qp5VsJGkSwBPgN5rUa0np1vP2WwDihPIFbQMUqm+ZBOOCbhbYMx4KzXwIDAQABAoIBAQCOdNi74eJiAZsyPHbHWy+zn7bM44isSoPKpKuVDjSgw8Hn03BlSM8E3fQuOwUG2niL4jPOS+3Zn8k9+Ko719kXLY77itJfvPBLwqBdfJnNo1SRlB2FWksCDlmJo4Jy7KO3hQPCCJUggWgZdv37PqOMwDwBJPNVAS8suTpViXuK66EcDsB2m4R+rRXqVXz2w20CSCT1zathxEIQsBBxKR4lJHBgCE6GDwGf7SZGIxLgQhnnYUib3rSh3RyHoexPwPUjhQmJIVPWK1krRwpW93QXGL0wWXAvROyM0kg3Qd8evqqfkPHtO+zqnYa632l93DhgeESykTImxTykPcHxz2kJAoGBAM2EXe4CiLGLbjmYxatXOpQmI8rmsskvL6FE5tjYHh2XF57U3OoNhdHyU0H7Qz8z3mRj2irwRHG6QhjH+yU1TSWgiaPLc9rZ7PE4tRCOkG1c9vjMdvexOrXs16FUY1xWXMwVhvKlZOO3t1D7yzQvzibYyrkvHST61u+bf+fbtIW1AoGBAL4ocN1HPhcro9DTyiK/AJgntnwNA4jv6QwzvK+hPqd+DJoVHhBghbHhhdpZKiPRC/2nCLuiBSNmXzG2Awapn6JoBely319InxvKrPZOFcvOxKVHw1XDtrkUaL6yM6EgvLsHU8yR8Ov6gaLdg2NbpfA+VXL1KxvVDois8s/+hgFDAoGAWMiOK3w8wTaS757oBhUw4T94xvbS1cbktK6na5YxrGbRdXRP22zsGr6s6Rw6+NrXgFcCsPoLF3Z3h20dOf3EzjSEQZZq/miWy77LudNc4WH/74uk+Ww/CMjAfpmOMx28CQ5jtf9tjlKXhwy/xFPCo1WUflu0I32ZzPlIUEnBuuECgYAhqeMhKUWSsIUVqQi10f528UDbASrJCT/GizoyFWeUGzp75JUn7Q5+CSC7IOHW6WEoDHP9U5d5Rtw/Xqt2eHzsMWIqi82DfsW8E8s+51/wbrBdWjD4c+dbKIPKjp2ZPsRqj8eEBaoS/IwKmxBxfH4J498YtNJm4Pbrt0JdFAABJQKBgHFqGu3a5cCbcuo0wDPyvibq1BcFVQSgXHBhztu8LPRHTo3BzACrVEN3MX3H+uzafb0YHDb/tpPYw/RWC8luE1CrKSr9l/qG6r3nNaTnG7rxj2dRffsyXuUwSvc+DCu1aTR2Zk/uMoH1pr66cM5YjwVzQcuPA820+1BXyFsyGDy9" ], + "certificate" : [ 
"MIICmTCCAYECBgFyVvIFkjANBgkqhkiG9w0BAQsFADAQMQ4wDAYDVQQDDAV0aW1lZDAeFw0yMDA1MjcxNjIxNDFaFw0zMDA1MjcxNjIzMjFaMBAxDjAMBgNVBAMMBXRpbWVkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmKi1AOCgn2rmzdroi54yETw7vo7sVFoQsKYf3L68TgLMjHxsjZLj7o6aVi9wv3rZ0krm7JBD3nxcGSPyEkBGYwaGzO0qxSnjkEfqRFxOODjWcQglcfycY3JyQE8Uz2bJ45BQ3lebviP6AEF3RW0xmuiQ6BKr26sK8iRa7ivZH2SbLsKMVlR/7kCHNGnCnCqWXuCmMMItGT0eqclbnT0BBKBN6aTou7Nh95Uj3Iwh9EfPUIQjnrRK9YOCdh3mjvuadojiPWUvRkVOOGg9yBU32J/WKJUbO4qp5VsJGkSwBPgN5rUa0np1vP2WwDihPIFbQMUqm+ZBOOCbhbYMx4KzXwIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQA/52H5X2MAH6aw445RbeU6fVYnKJu4hBkcIGHPjKR++Gq54M6bcbnNKNunhVbGZUqdPHX4+ktnQSZauq2hd9HRcezhL3OlmtEnLNW8BoFPaB11gmdjOiVQGm6iJsSJGxrrO7q3YzY+IB/ZTZlWmuOGnUDprFxDv0LR3M8X0ls0ygmw4/CmXZ6Fhg43Ey27ZArlSmzohAq8YGpV8HEcfQFp/h6+B0hgMbufHXvXOSF3Rj7Es1XXrusOhTGPEVv3qC4AaSHSjVVk8C18gFm5hpUjwN7O5u/lSov7WV5iSJMcFnSOxIv9+CboMQehtBpIvczEmRDE+r4/hq5mlpnc/ba9" ], + "priority" : [ "100" ] + } + }, { + "id" : "6fb96b2c-f93d-47c0-bd8b-5e9568f71a43", + "name" : "hmac-generated", + "providerId" : "hmac-generated", + "subComponents" : { }, + "config" : { + "kid" : [ "14d96e27-1ef3-4c48-bbc3-95968353669f" ], + "secret" : [ "HjQ76-HhFsIZkDoEkmVxlVYCoXwFysEsmhK3LsyyMaI9FVyuc0Tb4kYuP2f5Pz--NYxPcvJr3Q8M8OwN9kHSTw" ], + "priority" : [ "100" ], + "algorithm" : [ "HS256" ] + } + }, { + "id" : "6eba3413-03c0-4359-8b90-471f2628550e", + "name" : "aes-generated", + "providerId" : "aes-generated", + "subComponents" : { }, + "config" : { + "kid" : [ "e99ccacc-1cf8-42e3-ac6a-23553f29efd6" ], + "secret" : [ "FpK8RqyaSzxuyi83SsilRA" ], + "priority" : [ "100" ] + } + } ] + }, + "internationalizationEnabled" : false, + "supportedLocales" : [ ], + "authenticationFlows" : [ { + "id" : "7d365b56-6fda-4795-8db1-ca66814fbbec", + "alias" : "Account verification options", + "description" : "Method with which to verity the existing account", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : 
"idp-email-verification", + "requirement" : "ALTERNATIVE", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "requirement" : "ALTERNATIVE", + "priority" : 20, + "flowAlias" : "Verify Existing Account by Re-authentication", + "userSetupAllowed" : false, + "autheticatorFlow" : true + } ] + }, { + "id" : "19ea7c3b-f0d6-4864-a241-e031459ce42c", + "alias" : "Authentication Options", + "description" : "Authentication options.", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "basic-auth", + "requirement" : "REQUIRED", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "basic-auth-otp", + "requirement" : "DISABLED", + "priority" : 20, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "auth-spnego", + "requirement" : "DISABLED", + "priority" : 30, + "userSetupAllowed" : false, + "autheticatorFlow" : false + } ] + }, { + "id" : "93775a29-9b62-4fc8-8287-4431102cd3d7", + "alias" : "Browser - Conditional OTP", + "description" : "Flow to determine if the OTP is required for the authentication", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "conditional-user-configured", + "requirement" : "REQUIRED", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "auth-otp-form", + "requirement" : "REQUIRED", + "priority" : 20, + "userSetupAllowed" : false, + "autheticatorFlow" : false + } ] + }, { + "id" : "ca658338-b3c5-4380-a012-b1c460bc3180", + "alias" : "Direct Grant - Conditional OTP", + "description" : "Flow to determine if the OTP is required for the authentication", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "conditional-user-configured", + "requirement" : 
"REQUIRED", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "direct-grant-validate-otp", + "requirement" : "REQUIRED", + "priority" : 20, + "userSetupAllowed" : false, + "autheticatorFlow" : false + } ] + }, { + "id" : "0abce959-cb79-4717-a9d6-cc9894b6c279", + "alias" : "First broker login - Conditional OTP", + "description" : "Flow to determine if the OTP is required for the authentication", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "conditional-user-configured", + "requirement" : "REQUIRED", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "auth-otp-form", + "requirement" : "REQUIRED", + "priority" : 20, + "userSetupAllowed" : false, + "autheticatorFlow" : false + } ] + }, { + "id" : "8b5547f7-6077-4628-9886-f9cc7dfae286", + "alias" : "Handle Existing Account", + "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "idp-confirm-link", + "requirement" : "REQUIRED", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "requirement" : "REQUIRED", + "priority" : 20, + "flowAlias" : "Account verification options", + "userSetupAllowed" : false, + "autheticatorFlow" : true + } ] + }, { + "id" : "3be9a096-a01c-483a-a1e0-26cce94b0408", + "alias" : "Reset - Conditional OTP", + "description" : "Flow to determine if the OTP should be reset or not. 
Set to REQUIRED to force.", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "conditional-user-configured", + "requirement" : "REQUIRED", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "reset-otp", + "requirement" : "REQUIRED", + "priority" : 20, + "userSetupAllowed" : false, + "autheticatorFlow" : false + } ] + }, { + "id" : "30ce88d5-6c70-4619-a341-b31ec4d7404d", + "alias" : "User creation or linking", + "description" : "Flow for the existing/non-existing user alternatives", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticatorConfig" : "create unique user config", + "authenticator" : "idp-create-user-if-unique", + "requirement" : "ALTERNATIVE", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "requirement" : "ALTERNATIVE", + "priority" : 20, + "flowAlias" : "Handle Existing Account", + "userSetupAllowed" : false, + "autheticatorFlow" : true + } ] + }, { + "id" : "ae7b80e2-92a2-439e-a876-ab54cbafdcd1", + "alias" : "Verify Existing Account by Re-authentication", + "description" : "Reauthentication of existing account", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "idp-username-password-form", + "requirement" : "REQUIRED", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "requirement" : "CONDITIONAL", + "priority" : 20, + "flowAlias" : "First broker login - Conditional OTP", + "userSetupAllowed" : false, + "autheticatorFlow" : true + } ] + }, { + "id" : "3d32657d-a96c-493b-bd88-6eba457755cc", + "alias" : "browser", + "description" : "browser based authentication", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "auth-cookie", + 
"requirement" : "ALTERNATIVE", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "auth-spnego", + "requirement" : "DISABLED", + "priority" : 20, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "identity-provider-redirector", + "requirement" : "ALTERNATIVE", + "priority" : 25, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "requirement" : "ALTERNATIVE", + "priority" : 30, + "flowAlias" : "forms", + "userSetupAllowed" : false, + "autheticatorFlow" : true + } ] + }, { + "id" : "efd36136-a562-44a2-993c-4cb2b795df5f", + "alias" : "clients", + "description" : "Base authentication for clients", + "providerId" : "client-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "client-secret", + "requirement" : "ALTERNATIVE", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "client-jwt", + "requirement" : "ALTERNATIVE", + "priority" : 20, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "client-secret-jwt", + "requirement" : "ALTERNATIVE", + "priority" : 30, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "client-x509", + "requirement" : "ALTERNATIVE", + "priority" : 40, + "userSetupAllowed" : false, + "autheticatorFlow" : false + } ] + }, { + "id" : "42adffb6-c0f9-4f20-a1bf-c372c751a8b4", + "alias" : "direct grant", + "description" : "OpenID Connect Resource Owner Grant", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "direct-grant-validate-username", + "requirement" : "REQUIRED", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "direct-grant-validate-password", + "requirement" : "REQUIRED", + "priority" : 20, + "userSetupAllowed" : false, + 
"autheticatorFlow" : false + }, { + "requirement" : "CONDITIONAL", + "priority" : 30, + "flowAlias" : "Direct Grant - Conditional OTP", + "userSetupAllowed" : false, + "autheticatorFlow" : true + } ] + }, { + "id" : "3c90b3b3-46cc-4172-a5ef-9da549b994af", + "alias" : "docker auth", + "description" : "Used by Docker clients to authenticate against the IDP", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "docker-http-basic-authenticator", + "requirement" : "REQUIRED", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + } ] + }, { + "id" : "697c72a6-e218-499c-9101-1061b32c400d", + "alias" : "first broker login", + "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticatorConfig" : "review profile config", + "authenticator" : "idp-review-profile", + "requirement" : "REQUIRED", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "requirement" : "REQUIRED", + "priority" : 20, + "flowAlias" : "User creation or linking", + "userSetupAllowed" : false, + "autheticatorFlow" : true + } ] + }, { + "id" : "524257c8-af12-401c-a91f-f2ca61603c2f", + "alias" : "forms", + "description" : "Username, password, otp and other auth forms.", + "providerId" : "basic-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "auth-username-password-form", + "requirement" : "REQUIRED", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "requirement" : "CONDITIONAL", + "priority" : 20, + "flowAlias" : "Browser - Conditional OTP", + "userSetupAllowed" : false, + "autheticatorFlow" : true + } ] + }, { + "id" : "c6dfe369-c661-4308-a0ae-b452bb595175", + "alias" : "http 
challenge", + "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "no-cookie-redirect", + "requirement" : "REQUIRED", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "requirement" : "REQUIRED", + "priority" : 20, + "flowAlias" : "Authentication Options", + "userSetupAllowed" : false, + "autheticatorFlow" : true + } ] + }, { + "id" : "872c96d6-2147-403a-96fe-08d1e36b6c3f", + "alias" : "registration", + "description" : "registration flow", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "registration-page-form", + "requirement" : "REQUIRED", + "priority" : 10, + "flowAlias" : "registration form", + "userSetupAllowed" : false, + "autheticatorFlow" : true + } ] + }, { + "id" : "6bb44f04-e913-4a3c-b245-c9fb7d531a95", + "alias" : "registration form", + "description" : "registration form", + "providerId" : "form-flow", + "topLevel" : false, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "registration-user-creation", + "requirement" : "REQUIRED", + "priority" : 20, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "registration-profile-action", + "requirement" : "REQUIRED", + "priority" : 40, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "registration-password-action", + "requirement" : "REQUIRED", + "priority" : 50, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "registration-recaptcha-action", + "requirement" : "DISABLED", + "priority" : 60, + "userSetupAllowed" : false, + "autheticatorFlow" : false + } ] + }, { + "id" : "0ca3feab-db41-4856-be2d-305e6c687f72", + "alias" : "reset credentials", + "description" : "Reset credentials for a user if 
they forgot their password or something", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "reset-credentials-choose-user", + "requirement" : "REQUIRED", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "reset-credential-email", + "requirement" : "REQUIRED", + "priority" : 20, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "authenticator" : "reset-password", + "requirement" : "REQUIRED", + "priority" : 30, + "userSetupAllowed" : false, + "autheticatorFlow" : false + }, { + "requirement" : "CONDITIONAL", + "priority" : 40, + "flowAlias" : "Reset - Conditional OTP", + "userSetupAllowed" : false, + "autheticatorFlow" : true + } ] + }, { + "id" : "f43b4f94-eb1b-4c77-81b3-ab8ba2c851a8", + "alias" : "saml ecp", + "description" : "SAML ECP Profile Authentication Flow", + "providerId" : "basic-flow", + "topLevel" : true, + "builtIn" : true, + "authenticationExecutions" : [ { + "authenticator" : "http-basic-authenticator", + "requirement" : "REQUIRED", + "priority" : 10, + "userSetupAllowed" : false, + "autheticatorFlow" : false + } ] + } ], + "authenticatorConfig" : [ { + "id" : "c058c42a-70ae-4d29-8824-b65784892abc", + "alias" : "create unique user config", + "config" : { + "require.password.update.after.registration" : "false" + } + }, { + "id" : "0982062f-651f-4670-bf88-7808e653af56", + "alias" : "review profile config", + "config" : { + "update.profile.on.first.login" : "missing" + } + } ], + "requiredActions" : [ { + "alias" : "CONFIGURE_TOTP", + "name" : "Configure OTP", + "providerId" : "CONFIGURE_TOTP", + "enabled" : true, + "defaultAction" : false, + "priority" : 10, + "config" : { } + }, { + "alias" : "terms_and_conditions", + "name" : "Terms and Conditions", + "providerId" : "terms_and_conditions", + "enabled" : false, + "defaultAction" : false, + "priority" : 20, + "config" : { } + }, { + "alias" : 
"UPDATE_PASSWORD", + "name" : "Update Password", + "providerId" : "UPDATE_PASSWORD", + "enabled" : true, + "defaultAction" : false, + "priority" : 30, + "config" : { } + }, { + "alias" : "UPDATE_PROFILE", + "name" : "Update Profile", + "providerId" : "UPDATE_PROFILE", + "enabled" : true, + "defaultAction" : false, + "priority" : 40, + "config" : { } + }, { + "alias" : "VERIFY_EMAIL", + "name" : "Verify Email", + "providerId" : "VERIFY_EMAIL", + "enabled" : true, + "defaultAction" : false, + "priority" : 50, + "config" : { } + }, { + "alias" : "update_user_locale", + "name" : "Update User Locale", + "providerId" : "update_user_locale", + "enabled" : true, + "defaultAction" : false, + "priority" : 1000, + "config" : { } + } ], + "browserFlow" : "browser", + "registrationFlow" : "registration", + "directGrantFlow" : "direct grant", + "resetCredentialsFlow" : "reset credentials", + "clientAuthenticationFlow" : "clients", + "dockerAuthenticationFlow" : "docker auth", + "attributes" : { + "clientSessionIdleTimeout" : "0", + "clientSessionMaxLifespan" : "0" + }, + "keycloakVersion" : "10.0.1", + "userManagedAccessAllowed" : false +} \ No newline at end of file diff --git a/dev-config/nginx.conf b/dev-config/nginx.conf new file mode 100644 index 000000000..f48f0245f --- /dev/null +++ b/dev-config/nginx.conf @@ -0,0 +1,31 @@ +resolver 127.0.0.11 valid=2s; + +server { + listen 80; + listen [::]:80; + + server_name timed.local; + + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Host $http_host; + proxy_http_version 1.1; + proxy_redirect off; + + client_max_body_size 50m; + + # db-flush may not be exposed in PRODUCTION! 
+ location ~ ^/(api|admin|oidc|static|db-flush)/ { + set $backend http://backend; + proxy_pass $backend; + } + + location ~ ^/auth/ { + set $keycloak http://keycloak:8080; + proxy_pass $keycloak; + } + + location / { + set $frontend http://frontend; + proxy_pass $frontend; + } +} diff --git a/docker-compose.override.yml b/docker-compose.override.yml new file mode 100644 index 000000000..49a5716d0 --- /dev/null +++ b/docker-compose.override.yml @@ -0,0 +1,62 @@ +version: "3.7" + +services: + backend: + build: + context: ./backend/ + args: + INSTALL_DEV_DEPENDENCIES: "true" + depends_on: + - mailhog + environment: + - PYTHONDONTWRITEBYTECODE=1 + - EMAIL_URL=smtp://mailhog:1025 + - DJANGO_OIDC_USERNAME_CLAIM=preferred_username + volumes: + - ./backend/:/app + command: /bin/sh cmd.sh --autoreload --static + ports: + - "81:81" + networks: + - timed.local + + keycloak: + image: jboss/keycloak:10.0.1 + volumes: + - ./dev-config/keycloak-config.json:/etc/keycloak/keycloak-config.json:rw + depends_on: + - db + environment: + - DB_VENDOR=postgres + - DB_ADDR=db + - DB_USER=timed + - DB_DATABASE=timed + - DB_PASSWORD=timed + - PROXY_ADDRESS_FORWARDING=true + - KEYCLOAK_USER=admin + - KEYCLOAK_PASSWORD=admin + # start keycloak with the following command to perform an export of the `timed` realm. 
+ #command: ["-Dkeycloak.migration.action=export", "-Dkeycloak.migration.realmName=timed", "-Dkeycloak.migration.provider=singleFile", "-Dkeycloak.migration.file=/etc/keycloak/keycloak-config.json", "-b", "0.0.0.0"] + command: ["-Dkeycloak.migration.action=import", "-Dkeycloak.migration.provider=singleFile", "-Dkeycloak.migration.file=/etc/keycloak/keycloak-config.json", "-b", "0.0.0.0"] + networks: + - timed.local + + proxy: + image: nginx:1.17.10-alpine + ports: + - 80:80 + volumes: + - ./dev-config/nginx.conf:/etc/nginx/conf.d/default.conf:ro + networks: + timed.local: + aliases: + - timed.local + + mailhog: + image: mailhog/mailhog + ports: + - 8025:8025 + environment: + - MH_UI_WEB_PATH=mailhog + networks: + - timed.local diff --git a/docker-compose.yml b/docker-compose.yml index 2b2341598..785ad1b1a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,8 +1,8 @@ -version: "3" +version: "3.7" services: db: - image: postgres:9.4 + image: postgres:12.4 ports: - 5432:5432 volumes: @@ -10,34 +10,35 @@ services: environment: - POSTGRES_USER=timed - POSTGRES_PASSWORD=timed - - frontend: - build: - context: . 
- ports: - - 4200:80 + networks: + - timed.local backend: - image: ghcr.io/adfinis/timed-backend:latest + build: + context: ./backend/ ports: - 8000:80 depends_on: - db - - mailhog environment: - DJANGO_DATABASE_HOST=db - DJANGO_DATABASE_PORT=5432 - - ENV=docker - STATIC_ROOT=/var/www/static - - EMAIL_URL=smtp://mailhog:1025 - command: /bin/sh -c "wait-for-it.sh -t 60 db:5432 -- ./manage.py migrate && ./manage.py loaddata timed/fixtures/test_data.json && uwsgi" - - mailhog: - image: mailhog/mailhog + networks: + - timed.local + + frontend: + build: ./frontend/ ports: - - 8025:8025 + - 4200:80 environment: - - MH_UI_WEB_PATH=mailhog + - TIMED_SSO_CLIENT_HOST=http://timed.local/auth/realms/timed/protocol/openid-connect + - TIMED_SSO_CLIENT_ID=timed-public + networks: + - timed.local volumes: dbdata: + +networks: + timed.local: diff --git a/.bowerrc b/frontend/.bowerrc similarity index 100% rename from .bowerrc rename to frontend/.bowerrc diff --git a/.dockerignore b/frontend/.dockerignore similarity index 100% rename from .dockerignore rename to frontend/.dockerignore diff --git a/.ember-cli b/frontend/.ember-cli similarity index 100% rename from .ember-cli rename to frontend/.ember-cli diff --git a/.eslintignore b/frontend/.eslintignore similarity index 100% rename from .eslintignore rename to frontend/.eslintignore diff --git a/.eslintrc.js b/frontend/.eslintrc.js similarity index 100% rename from .eslintrc.js rename to frontend/.eslintrc.js diff --git a/frontend/.gitignore b/frontend/.gitignore new file mode 100644 index 000000000..20e20b5a0 --- /dev/null +++ b/frontend/.gitignore @@ -0,0 +1,31 @@ +# See https://help.github.com/ignore-files/ for more about ignoring files. 
+ +# compiled output +/dist +/tmp + +# dependencies +/node_modules +/bower_components + +# misc +/.sass-cache +/.eslintcache +/connect.lock +/coverage/* +/libpeerconnection.log +npm-debug.log* +testem.log +*.swp +*.orig + +# ember-try +/.node_modules.ember-try/ +/bower.json.ember-try +/npm-shrinkwrap.json.ember-try +/package.json.ember-try +/package-lock.json.ember-try +/yarn.lock.ember-try + +# broccoli-debug +/DEBUG/ diff --git a/frontend/.husky/commit-msg b/frontend/.husky/commit-msg new file mode 100755 index 000000000..5efdc4be6 --- /dev/null +++ b/frontend/.husky/commit-msg @@ -0,0 +1,8 @@ +# #!/bin/sh +# . "$(dirname "$0")/_/husky.sh" + +# # skip in CI +# [ -n "$CI" ] && exit 0 + +# # lint commit message +# pnpm commitlint --edit $1 diff --git a/frontend/.husky/pre-commit b/frontend/.husky/pre-commit new file mode 100755 index 000000000..404c80e07 --- /dev/null +++ b/frontend/.husky/pre-commit @@ -0,0 +1,8 @@ +# #!/bin/sh +# . "$(dirname "$0")/_/husky.sh" + +# # skip in CI +# [ -n "$CI" ] && exit 0 + +# # lint staged files +# pnpm lint-staged diff --git a/.npmrc b/frontend/.npmrc similarity index 100% rename from .npmrc rename to frontend/.npmrc diff --git a/.prettierignore b/frontend/.prettierignore similarity index 100% rename from .prettierignore rename to frontend/.prettierignore diff --git a/.template-lintrc-ci.js b/frontend/.template-lintrc-ci.js similarity index 100% rename from .template-lintrc-ci.js rename to frontend/.template-lintrc-ci.js diff --git a/.template-lintrc.js b/frontend/.template-lintrc.js similarity index 100% rename from .template-lintrc.js rename to frontend/.template-lintrc.js diff --git a/.watchmanconfig b/frontend/.watchmanconfig similarity index 100% rename from .watchmanconfig rename to frontend/.watchmanconfig diff --git a/CHANGELOG.md b/frontend/CHANGELOG.md similarity index 100% rename from CHANGELOG.md rename to frontend/CHANGELOG.md diff --git a/Dockerfile b/frontend/Dockerfile similarity index 100% rename from Dockerfile 
rename to frontend/Dockerfile diff --git a/app/abilities/absence-credit.js b/frontend/app/abilities/absence-credit.js similarity index 100% rename from app/abilities/absence-credit.js rename to frontend/app/abilities/absence-credit.js diff --git a/app/abilities/overtime-credit.js b/frontend/app/abilities/overtime-credit.js similarity index 100% rename from app/abilities/overtime-credit.js rename to frontend/app/abilities/overtime-credit.js diff --git a/app/abilities/page.js b/frontend/app/abilities/page.js similarity index 100% rename from app/abilities/page.js rename to frontend/app/abilities/page.js diff --git a/app/abilities/report.js b/frontend/app/abilities/report.js similarity index 100% rename from app/abilities/report.js rename to frontend/app/abilities/report.js diff --git a/app/abilities/user.js b/frontend/app/abilities/user.js similarity index 100% rename from app/abilities/user.js rename to frontend/app/abilities/user.js diff --git a/app/adapters/activity-block.js b/frontend/app/adapters/activity-block.js similarity index 100% rename from app/adapters/activity-block.js rename to frontend/app/adapters/activity-block.js diff --git a/app/adapters/application.js b/frontend/app/adapters/application.js similarity index 100% rename from app/adapters/application.js rename to frontend/app/adapters/application.js diff --git a/app/analysis/edit/controller.js b/frontend/app/analysis/edit/controller.js similarity index 100% rename from app/analysis/edit/controller.js rename to frontend/app/analysis/edit/controller.js diff --git a/app/analysis/edit/route.js b/frontend/app/analysis/edit/route.js similarity index 100% rename from app/analysis/edit/route.js rename to frontend/app/analysis/edit/route.js diff --git a/app/analysis/edit/template.hbs b/frontend/app/analysis/edit/template.hbs similarity index 100% rename from app/analysis/edit/template.hbs rename to frontend/app/analysis/edit/template.hbs diff --git a/app/analysis/index/controller.js 
b/frontend/app/analysis/index/controller.js similarity index 100% rename from app/analysis/index/controller.js rename to frontend/app/analysis/index/controller.js diff --git a/app/analysis/index/route.js b/frontend/app/analysis/index/route.js similarity index 100% rename from app/analysis/index/route.js rename to frontend/app/analysis/index/route.js diff --git a/app/analysis/index/template.hbs b/frontend/app/analysis/index/template.hbs similarity index 100% rename from app/analysis/index/template.hbs rename to frontend/app/analysis/index/template.hbs diff --git a/app/analysis/route.js b/frontend/app/analysis/route.js similarity index 100% rename from app/analysis/route.js rename to frontend/app/analysis/route.js diff --git a/app/app.js b/frontend/app/app.js similarity index 100% rename from app/app.js rename to frontend/app/app.js diff --git a/app/application/route.js b/frontend/app/application/route.js similarity index 100% rename from app/application/route.js rename to frontend/app/application/route.js diff --git a/app/application/template.hbs b/frontend/app/application/template.hbs similarity index 100% rename from app/application/template.hbs rename to frontend/app/application/template.hbs diff --git a/app/breakpoints.js b/frontend/app/breakpoints.js similarity index 100% rename from app/breakpoints.js rename to frontend/app/breakpoints.js diff --git a/app/components/async-list/template.hbs b/frontend/app/components/async-list/template.hbs similarity index 100% rename from app/components/async-list/template.hbs rename to frontend/app/components/async-list/template.hbs diff --git a/app/components/attendance-slider/component.js b/frontend/app/components/attendance-slider/component.js similarity index 100% rename from app/components/attendance-slider/component.js rename to frontend/app/components/attendance-slider/component.js diff --git a/app/components/attendance-slider/template.hbs b/frontend/app/components/attendance-slider/template.hbs similarity index 100% 
rename from app/components/attendance-slider/template.hbs rename to frontend/app/components/attendance-slider/template.hbs diff --git a/app/components/balance-donut/component.js b/frontend/app/components/balance-donut/component.js similarity index 100% rename from app/components/balance-donut/component.js rename to frontend/app/components/balance-donut/component.js diff --git a/app/components/balance-donut/template.hbs b/frontend/app/components/balance-donut/template.hbs similarity index 100% rename from app/components/balance-donut/template.hbs rename to frontend/app/components/balance-donut/template.hbs diff --git a/app/components/changed-warning/template.hbs b/frontend/app/components/changed-warning/template.hbs similarity index 100% rename from app/components/changed-warning/template.hbs rename to frontend/app/components/changed-warning/template.hbs diff --git a/app/components/customer-visible-icon/template.hbs b/frontend/app/components/customer-visible-icon/template.hbs similarity index 100% rename from app/components/customer-visible-icon/template.hbs rename to frontend/app/components/customer-visible-icon/template.hbs diff --git a/app/components/date-buttons/component.js b/frontend/app/components/date-buttons/component.js similarity index 100% rename from app/components/date-buttons/component.js rename to frontend/app/components/date-buttons/component.js diff --git a/app/components/date-buttons/template.hbs b/frontend/app/components/date-buttons/template.hbs similarity index 100% rename from app/components/date-buttons/template.hbs rename to frontend/app/components/date-buttons/template.hbs diff --git a/app/components/date-navigation/component.js b/frontend/app/components/date-navigation/component.js similarity index 100% rename from app/components/date-navigation/component.js rename to frontend/app/components/date-navigation/component.js diff --git a/app/components/date-navigation/template.hbs b/frontend/app/components/date-navigation/template.hbs 
similarity index 100% rename from app/components/date-navigation/template.hbs rename to frontend/app/components/date-navigation/template.hbs diff --git a/app/components/duration-since/component.js b/frontend/app/components/duration-since/component.js similarity index 100% rename from app/components/duration-since/component.js rename to frontend/app/components/duration-since/component.js diff --git a/app/components/duration-since/template.hbs b/frontend/app/components/duration-since/template.hbs similarity index 100% rename from app/components/duration-since/template.hbs rename to frontend/app/components/duration-since/template.hbs diff --git a/app/components/filter-sidebar/component.js b/frontend/app/components/filter-sidebar/component.js similarity index 100% rename from app/components/filter-sidebar/component.js rename to frontend/app/components/filter-sidebar/component.js diff --git a/app/components/filter-sidebar/filter/template.hbs b/frontend/app/components/filter-sidebar/filter/template.hbs similarity index 100% rename from app/components/filter-sidebar/filter/template.hbs rename to frontend/app/components/filter-sidebar/filter/template.hbs diff --git a/app/components/filter-sidebar/group/component.js b/frontend/app/components/filter-sidebar/group/component.js similarity index 100% rename from app/components/filter-sidebar/group/component.js rename to frontend/app/components/filter-sidebar/group/component.js diff --git a/frontend/app/components/filter-sidebar/group/styles.scss b/frontend/app/components/filter-sidebar/group/styles.scss new file mode 100644 index 000000000..e69de29bb diff --git a/app/components/filter-sidebar/group/template.hbs b/frontend/app/components/filter-sidebar/group/template.hbs similarity index 100% rename from app/components/filter-sidebar/group/template.hbs rename to frontend/app/components/filter-sidebar/group/template.hbs diff --git a/app/components/filter-sidebar/label/template.hbs 
b/frontend/app/components/filter-sidebar/label/template.hbs similarity index 100% rename from app/components/filter-sidebar/label/template.hbs rename to frontend/app/components/filter-sidebar/label/template.hbs diff --git a/app/components/filter-sidebar/template.hbs b/frontend/app/components/filter-sidebar/template.hbs similarity index 100% rename from app/components/filter-sidebar/template.hbs rename to frontend/app/components/filter-sidebar/template.hbs diff --git a/app/components/in-viewport/component.js b/frontend/app/components/in-viewport/component.js similarity index 100% rename from app/components/in-viewport/component.js rename to frontend/app/components/in-viewport/component.js diff --git a/app/components/in-viewport/template.hbs b/frontend/app/components/in-viewport/template.hbs similarity index 100% rename from app/components/in-viewport/template.hbs rename to frontend/app/components/in-viewport/template.hbs diff --git a/app/components/loading-icon/template.hbs b/frontend/app/components/loading-icon/template.hbs similarity index 100% rename from app/components/loading-icon/template.hbs rename to frontend/app/components/loading-icon/template.hbs diff --git a/app/components/magic-link-btn/component.js b/frontend/app/components/magic-link-btn/component.js similarity index 100% rename from app/components/magic-link-btn/component.js rename to frontend/app/components/magic-link-btn/component.js diff --git a/app/components/magic-link-btn/template.hbs b/frontend/app/components/magic-link-btn/template.hbs similarity index 100% rename from app/components/magic-link-btn/template.hbs rename to frontend/app/components/magic-link-btn/template.hbs diff --git a/app/components/magic-link-modal/component.js b/frontend/app/components/magic-link-modal/component.js similarity index 100% rename from app/components/magic-link-modal/component.js rename to frontend/app/components/magic-link-modal/component.js diff --git a/app/components/magic-link-modal/template.hbs 
b/frontend/app/components/magic-link-modal/template.hbs similarity index 100% rename from app/components/magic-link-modal/template.hbs rename to frontend/app/components/magic-link-modal/template.hbs diff --git a/app/components/no-mobile-message/template.hbs b/frontend/app/components/no-mobile-message/template.hbs similarity index 100% rename from app/components/no-mobile-message/template.hbs rename to frontend/app/components/no-mobile-message/template.hbs diff --git a/app/components/no-permission/template.hbs b/frontend/app/components/no-permission/template.hbs similarity index 100% rename from app/components/no-permission/template.hbs rename to frontend/app/components/no-permission/template.hbs diff --git a/app/components/not-identical-warning/template.hbs b/frontend/app/components/not-identical-warning/template.hbs similarity index 100% rename from app/components/not-identical-warning/template.hbs rename to frontend/app/components/not-identical-warning/template.hbs diff --git a/app/components/optimized-power-select/component.js b/frontend/app/components/optimized-power-select/component.js similarity index 100% rename from app/components/optimized-power-select/component.js rename to frontend/app/components/optimized-power-select/component.js diff --git a/app/components/optimized-power-select/custom-options/customer-option.hbs b/frontend/app/components/optimized-power-select/custom-options/customer-option.hbs similarity index 100% rename from app/components/optimized-power-select/custom-options/customer-option.hbs rename to frontend/app/components/optimized-power-select/custom-options/customer-option.hbs diff --git a/app/components/optimized-power-select/custom-options/project-option.hbs b/frontend/app/components/optimized-power-select/custom-options/project-option.hbs similarity index 100% rename from app/components/optimized-power-select/custom-options/project-option.hbs rename to frontend/app/components/optimized-power-select/custom-options/project-option.hbs 
diff --git a/app/components/optimized-power-select/custom-options/task-option.hbs b/frontend/app/components/optimized-power-select/custom-options/task-option.hbs similarity index 100% rename from app/components/optimized-power-select/custom-options/task-option.hbs rename to frontend/app/components/optimized-power-select/custom-options/task-option.hbs diff --git a/app/components/optimized-power-select/custom-options/user-option.hbs b/frontend/app/components/optimized-power-select/custom-options/user-option.hbs similarity index 100% rename from app/components/optimized-power-select/custom-options/user-option.hbs rename to frontend/app/components/optimized-power-select/custom-options/user-option.hbs diff --git a/app/components/optimized-power-select/custom-select/task-selection.hbs b/frontend/app/components/optimized-power-select/custom-select/task-selection.hbs similarity index 100% rename from app/components/optimized-power-select/custom-select/task-selection.hbs rename to frontend/app/components/optimized-power-select/custom-select/task-selection.hbs diff --git a/app/components/optimized-power-select/custom-select/user-selection.hbs b/frontend/app/components/optimized-power-select/custom-select/user-selection.hbs similarity index 100% rename from app/components/optimized-power-select/custom-select/user-selection.hbs rename to frontend/app/components/optimized-power-select/custom-select/user-selection.hbs diff --git a/app/components/optimized-power-select/options/component.js b/frontend/app/components/optimized-power-select/options/component.js similarity index 100% rename from app/components/optimized-power-select/options/component.js rename to frontend/app/components/optimized-power-select/options/component.js diff --git a/app/components/optimized-power-select/options/template.hbs b/frontend/app/components/optimized-power-select/options/template.hbs similarity index 100% rename from app/components/optimized-power-select/options/template.hbs rename to 
frontend/app/components/optimized-power-select/options/template.hbs diff --git a/app/components/optimized-power-select/template.hbs b/frontend/app/components/optimized-power-select/template.hbs similarity index 100% rename from app/components/optimized-power-select/template.hbs rename to frontend/app/components/optimized-power-select/template.hbs diff --git a/app/components/optimized-power-select/trigger/component.js b/frontend/app/components/optimized-power-select/trigger/component.js similarity index 100% rename from app/components/optimized-power-select/trigger/component.js rename to frontend/app/components/optimized-power-select/trigger/component.js diff --git a/app/components/optimized-power-select/trigger/template.hbs b/frontend/app/components/optimized-power-select/trigger/template.hbs similarity index 100% rename from app/components/optimized-power-select/trigger/template.hbs rename to frontend/app/components/optimized-power-select/trigger/template.hbs diff --git a/app/components/page-permission/template.hbs b/frontend/app/components/page-permission/template.hbs similarity index 100% rename from app/components/page-permission/template.hbs rename to frontend/app/components/page-permission/template.hbs diff --git a/app/components/progress-tooltip/component.js b/frontend/app/components/progress-tooltip/component.js similarity index 100% rename from app/components/progress-tooltip/component.js rename to frontend/app/components/progress-tooltip/component.js diff --git a/app/components/progress-tooltip/template.hbs b/frontend/app/components/progress-tooltip/template.hbs similarity index 100% rename from app/components/progress-tooltip/template.hbs rename to frontend/app/components/progress-tooltip/template.hbs diff --git a/app/components/record-button/component.js b/frontend/app/components/record-button/component.js similarity index 100% rename from app/components/record-button/component.js rename to frontend/app/components/record-button/component.js diff --git 
a/app/components/record-button/template.hbs b/frontend/app/components/record-button/template.hbs similarity index 100% rename from app/components/record-button/template.hbs rename to frontend/app/components/record-button/template.hbs diff --git a/app/components/report-review-warning/component.js b/frontend/app/components/report-review-warning/component.js similarity index 100% rename from app/components/report-review-warning/component.js rename to frontend/app/components/report-review-warning/component.js diff --git a/app/components/report-review-warning/template.hbs b/frontend/app/components/report-review-warning/template.hbs similarity index 100% rename from app/components/report-review-warning/template.hbs rename to frontend/app/components/report-review-warning/template.hbs diff --git a/app/components/report-row/component.js b/frontend/app/components/report-row/component.js similarity index 100% rename from app/components/report-row/component.js rename to frontend/app/components/report-row/component.js diff --git a/app/components/report-row/template.hbs b/frontend/app/components/report-row/template.hbs similarity index 100% rename from app/components/report-row/template.hbs rename to frontend/app/components/report-row/template.hbs diff --git a/app/components/scroll-container.hbs b/frontend/app/components/scroll-container.hbs similarity index 100% rename from app/components/scroll-container.hbs rename to frontend/app/components/scroll-container.hbs diff --git a/app/components/sort-header/component.js b/frontend/app/components/sort-header/component.js similarity index 100% rename from app/components/sort-header/component.js rename to frontend/app/components/sort-header/component.js diff --git a/app/components/sort-header/template.hbs b/frontend/app/components/sort-header/template.hbs similarity index 100% rename from app/components/sort-header/template.hbs rename to frontend/app/components/sort-header/template.hbs diff --git 
a/app/components/statistic-list/bar/component.js b/frontend/app/components/statistic-list/bar/component.js similarity index 100% rename from app/components/statistic-list/bar/component.js rename to frontend/app/components/statistic-list/bar/component.js diff --git a/app/components/statistic-list/bar/template.hbs b/frontend/app/components/statistic-list/bar/template.hbs similarity index 100% rename from app/components/statistic-list/bar/template.hbs rename to frontend/app/components/statistic-list/bar/template.hbs diff --git a/app/components/statistic-list/column/template.hbs b/frontend/app/components/statistic-list/column/template.hbs similarity index 100% rename from app/components/statistic-list/column/template.hbs rename to frontend/app/components/statistic-list/column/template.hbs diff --git a/app/components/statistic-list/component.js b/frontend/app/components/statistic-list/component.js similarity index 100% rename from app/components/statistic-list/component.js rename to frontend/app/components/statistic-list/component.js diff --git a/app/components/statistic-list/template.hbs b/frontend/app/components/statistic-list/template.hbs similarity index 100% rename from app/components/statistic-list/template.hbs rename to frontend/app/components/statistic-list/template.hbs diff --git a/app/components/sy-calendar/component.js b/frontend/app/components/sy-calendar/component.js similarity index 100% rename from app/components/sy-calendar/component.js rename to frontend/app/components/sy-calendar/component.js diff --git a/app/components/sy-calendar/styles.scss b/frontend/app/components/sy-calendar/styles.scss similarity index 100% rename from app/components/sy-calendar/styles.scss rename to frontend/app/components/sy-calendar/styles.scss diff --git a/app/components/sy-calendar/template.hbs b/frontend/app/components/sy-calendar/template.hbs similarity index 100% rename from app/components/sy-calendar/template.hbs rename to 
frontend/app/components/sy-calendar/template.hbs diff --git a/app/components/sy-checkbox/component.js b/frontend/app/components/sy-checkbox/component.js similarity index 100% rename from app/components/sy-checkbox/component.js rename to frontend/app/components/sy-checkbox/component.js diff --git a/app/components/sy-checkbox/template.hbs b/frontend/app/components/sy-checkbox/template.hbs similarity index 100% rename from app/components/sy-checkbox/template.hbs rename to frontend/app/components/sy-checkbox/template.hbs diff --git a/app/components/sy-checkmark/component.js b/frontend/app/components/sy-checkmark/component.js similarity index 100% rename from app/components/sy-checkmark/component.js rename to frontend/app/components/sy-checkmark/component.js diff --git a/app/components/sy-checkmark/template.hbs b/frontend/app/components/sy-checkmark/template.hbs similarity index 100% rename from app/components/sy-checkmark/template.hbs rename to frontend/app/components/sy-checkmark/template.hbs diff --git a/app/components/sy-datepicker-btn/component.js b/frontend/app/components/sy-datepicker-btn/component.js similarity index 100% rename from app/components/sy-datepicker-btn/component.js rename to frontend/app/components/sy-datepicker-btn/component.js diff --git a/app/components/sy-datepicker-btn/template.hbs b/frontend/app/components/sy-datepicker-btn/template.hbs similarity index 100% rename from app/components/sy-datepicker-btn/template.hbs rename to frontend/app/components/sy-datepicker-btn/template.hbs diff --git a/app/components/sy-datepicker/component.js b/frontend/app/components/sy-datepicker/component.js similarity index 100% rename from app/components/sy-datepicker/component.js rename to frontend/app/components/sy-datepicker/component.js diff --git a/app/components/sy-datepicker/template.hbs b/frontend/app/components/sy-datepicker/template.hbs similarity index 100% rename from app/components/sy-datepicker/template.hbs rename to 
frontend/app/components/sy-datepicker/template.hbs diff --git a/app/components/sy-durationpicker-day/component.js b/frontend/app/components/sy-durationpicker-day/component.js similarity index 100% rename from app/components/sy-durationpicker-day/component.js rename to frontend/app/components/sy-durationpicker-day/component.js diff --git a/app/components/sy-durationpicker-day/template.hbs b/frontend/app/components/sy-durationpicker-day/template.hbs similarity index 100% rename from app/components/sy-durationpicker-day/template.hbs rename to frontend/app/components/sy-durationpicker-day/template.hbs diff --git a/app/components/sy-durationpicker/component.js b/frontend/app/components/sy-durationpicker/component.js similarity index 100% rename from app/components/sy-durationpicker/component.js rename to frontend/app/components/sy-durationpicker/component.js diff --git a/app/components/sy-durationpicker/template.hbs b/frontend/app/components/sy-durationpicker/template.hbs similarity index 100% rename from app/components/sy-durationpicker/template.hbs rename to frontend/app/components/sy-durationpicker/template.hbs diff --git a/app/components/sy-modal-target/template.hbs b/frontend/app/components/sy-modal-target/template.hbs similarity index 100% rename from app/components/sy-modal-target/template.hbs rename to frontend/app/components/sy-modal-target/template.hbs diff --git a/app/components/sy-modal/body/styles.scss b/frontend/app/components/sy-modal/body/styles.scss similarity index 100% rename from app/components/sy-modal/body/styles.scss rename to frontend/app/components/sy-modal/body/styles.scss diff --git a/app/components/sy-modal/body/template.hbs b/frontend/app/components/sy-modal/body/template.hbs similarity index 100% rename from app/components/sy-modal/body/template.hbs rename to frontend/app/components/sy-modal/body/template.hbs diff --git a/app/components/sy-modal/footer/template.hbs b/frontend/app/components/sy-modal/footer/template.hbs similarity index 100% 
rename from app/components/sy-modal/footer/template.hbs rename to frontend/app/components/sy-modal/footer/template.hbs diff --git a/app/components/sy-modal/header/template.hbs b/frontend/app/components/sy-modal/header/template.hbs similarity index 100% rename from app/components/sy-modal/header/template.hbs rename to frontend/app/components/sy-modal/header/template.hbs diff --git a/app/components/sy-modal/overlay/component.js b/frontend/app/components/sy-modal/overlay/component.js similarity index 100% rename from app/components/sy-modal/overlay/component.js rename to frontend/app/components/sy-modal/overlay/component.js diff --git a/app/components/sy-modal/overlay/template.hbs b/frontend/app/components/sy-modal/overlay/template.hbs similarity index 100% rename from app/components/sy-modal/overlay/template.hbs rename to frontend/app/components/sy-modal/overlay/template.hbs diff --git a/app/components/sy-modal/template.hbs b/frontend/app/components/sy-modal/template.hbs similarity index 100% rename from app/components/sy-modal/template.hbs rename to frontend/app/components/sy-modal/template.hbs diff --git a/app/components/sy-timepicker/component.js b/frontend/app/components/sy-timepicker/component.js similarity index 100% rename from app/components/sy-timepicker/component.js rename to frontend/app/components/sy-timepicker/component.js diff --git a/app/components/sy-timepicker/template.hbs b/frontend/app/components/sy-timepicker/template.hbs similarity index 100% rename from app/components/sy-timepicker/template.hbs rename to frontend/app/components/sy-timepicker/template.hbs diff --git a/app/components/sy-toggle/component.js b/frontend/app/components/sy-toggle/component.js similarity index 100% rename from app/components/sy-toggle/component.js rename to frontend/app/components/sy-toggle/component.js diff --git a/app/components/sy-toggle/template.hbs b/frontend/app/components/sy-toggle/template.hbs similarity index 100% rename from 
app/components/sy-toggle/template.hbs rename to frontend/app/components/sy-toggle/template.hbs diff --git a/app/components/sy-topnav/component.js b/frontend/app/components/sy-topnav/component.js similarity index 100% rename from app/components/sy-topnav/component.js rename to frontend/app/components/sy-topnav/component.js diff --git a/app/components/sy-topnav/template.hbs b/frontend/app/components/sy-topnav/template.hbs similarity index 100% rename from app/components/sy-topnav/template.hbs rename to frontend/app/components/sy-topnav/template.hbs diff --git a/app/components/task-selection/component.js b/frontend/app/components/task-selection/component.js similarity index 100% rename from app/components/task-selection/component.js rename to frontend/app/components/task-selection/component.js diff --git a/app/components/task-selection/template.hbs b/frontend/app/components/task-selection/template.hbs similarity index 100% rename from app/components/task-selection/template.hbs rename to frontend/app/components/task-selection/template.hbs diff --git a/app/components/timed-clock/component.js b/frontend/app/components/timed-clock/component.js similarity index 100% rename from app/components/timed-clock/component.js rename to frontend/app/components/timed-clock/component.js diff --git a/app/components/timed-clock/template.hbs b/frontend/app/components/timed-clock/template.hbs similarity index 100% rename from app/components/timed-clock/template.hbs rename to frontend/app/components/timed-clock/template.hbs diff --git a/app/components/tracking-bar/component.js b/frontend/app/components/tracking-bar/component.js similarity index 100% rename from app/components/tracking-bar/component.js rename to frontend/app/components/tracking-bar/component.js diff --git a/app/components/tracking-bar/template.hbs b/frontend/app/components/tracking-bar/template.hbs similarity index 100% rename from app/components/tracking-bar/template.hbs rename to 
frontend/app/components/tracking-bar/template.hbs diff --git a/app/components/user-selection/component.js b/frontend/app/components/user-selection/component.js similarity index 100% rename from app/components/user-selection/component.js rename to frontend/app/components/user-selection/component.js diff --git a/app/components/user-selection/template.hbs b/frontend/app/components/user-selection/template.hbs similarity index 100% rename from app/components/user-selection/template.hbs rename to frontend/app/components/user-selection/template.hbs diff --git a/app/components/vertical-collection/component.js b/frontend/app/components/vertical-collection/component.js similarity index 100% rename from app/components/vertical-collection/component.js rename to frontend/app/components/vertical-collection/component.js diff --git a/app/components/weekly-overview-benchmark/component.js b/frontend/app/components/weekly-overview-benchmark/component.js similarity index 100% rename from app/components/weekly-overview-benchmark/component.js rename to frontend/app/components/weekly-overview-benchmark/component.js diff --git a/app/components/weekly-overview-benchmark/template.hbs b/frontend/app/components/weekly-overview-benchmark/template.hbs similarity index 100% rename from app/components/weekly-overview-benchmark/template.hbs rename to frontend/app/components/weekly-overview-benchmark/template.hbs diff --git a/app/components/weekly-overview-day/component.js b/frontend/app/components/weekly-overview-day/component.js similarity index 100% rename from app/components/weekly-overview-day/component.js rename to frontend/app/components/weekly-overview-day/component.js diff --git a/app/components/weekly-overview-day/template.hbs b/frontend/app/components/weekly-overview-day/template.hbs similarity index 100% rename from app/components/weekly-overview-day/template.hbs rename to frontend/app/components/weekly-overview-day/template.hbs diff --git a/app/components/weekly-overview/component.js 
b/frontend/app/components/weekly-overview/component.js similarity index 100% rename from app/components/weekly-overview/component.js rename to frontend/app/components/weekly-overview/component.js diff --git a/app/components/weekly-overview/template.hbs b/frontend/app/components/weekly-overview/template.hbs similarity index 100% rename from app/components/weekly-overview/template.hbs rename to frontend/app/components/weekly-overview/template.hbs diff --git a/app/components/welcome-modal/template.hbs b/frontend/app/components/welcome-modal/template.hbs similarity index 100% rename from app/components/welcome-modal/template.hbs rename to frontend/app/components/welcome-modal/template.hbs diff --git a/app/components/worktime-balance-chart/component.js b/frontend/app/components/worktime-balance-chart/component.js similarity index 100% rename from app/components/worktime-balance-chart/component.js rename to frontend/app/components/worktime-balance-chart/component.js diff --git a/app/components/worktime-balance-chart/template.hbs b/frontend/app/components/worktime-balance-chart/template.hbs similarity index 100% rename from app/components/worktime-balance-chart/template.hbs rename to frontend/app/components/worktime-balance-chart/template.hbs diff --git a/app/controllers/qpcontroller.js b/frontend/app/controllers/qpcontroller.js similarity index 100% rename from app/controllers/qpcontroller.js rename to frontend/app/controllers/qpcontroller.js diff --git a/app/helpers/balance-highlight-class.js b/frontend/app/helpers/balance-highlight-class.js similarity index 100% rename from app/helpers/balance-highlight-class.js rename to frontend/app/helpers/balance-highlight-class.js diff --git a/app/helpers/format-duration.js b/frontend/app/helpers/format-duration.js similarity index 100% rename from app/helpers/format-duration.js rename to frontend/app/helpers/format-duration.js diff --git a/app/helpers/humanize-duration.js b/frontend/app/helpers/humanize-duration.js similarity 
index 100% rename from app/helpers/humanize-duration.js rename to frontend/app/helpers/humanize-duration.js diff --git a/app/helpers/parse-django-duration.js b/frontend/app/helpers/parse-django-duration.js similarity index 100% rename from app/helpers/parse-django-duration.js rename to frontend/app/helpers/parse-django-duration.js diff --git a/app/index.html b/frontend/app/index.html similarity index 100% rename from app/index.html rename to frontend/app/index.html diff --git a/app/index/activities/controller.js b/frontend/app/index/activities/controller.js similarity index 100% rename from app/index/activities/controller.js rename to frontend/app/index/activities/controller.js diff --git a/app/index/activities/edit/controller.js b/frontend/app/index/activities/edit/controller.js similarity index 100% rename from app/index/activities/edit/controller.js rename to frontend/app/index/activities/edit/controller.js diff --git a/app/index/activities/edit/route.js b/frontend/app/index/activities/edit/route.js similarity index 100% rename from app/index/activities/edit/route.js rename to frontend/app/index/activities/edit/route.js diff --git a/app/index/activities/edit/template.hbs b/frontend/app/index/activities/edit/template.hbs similarity index 100% rename from app/index/activities/edit/template.hbs rename to frontend/app/index/activities/edit/template.hbs diff --git a/app/index/activities/route.js b/frontend/app/index/activities/route.js similarity index 100% rename from app/index/activities/route.js rename to frontend/app/index/activities/route.js diff --git a/app/index/activities/template.hbs b/frontend/app/index/activities/template.hbs similarity index 100% rename from app/index/activities/template.hbs rename to frontend/app/index/activities/template.hbs diff --git a/app/index/attendances/controller.js b/frontend/app/index/attendances/controller.js similarity index 100% rename from app/index/attendances/controller.js rename to 
frontend/app/index/attendances/controller.js diff --git a/app/index/attendances/route.js b/frontend/app/index/attendances/route.js similarity index 100% rename from app/index/attendances/route.js rename to frontend/app/index/attendances/route.js diff --git a/app/index/attendances/template.hbs b/frontend/app/index/attendances/template.hbs similarity index 100% rename from app/index/attendances/template.hbs rename to frontend/app/index/attendances/template.hbs diff --git a/app/index/controller.js b/frontend/app/index/controller.js similarity index 100% rename from app/index/controller.js rename to frontend/app/index/controller.js diff --git a/app/index/reports/controller.js b/frontend/app/index/reports/controller.js similarity index 100% rename from app/index/reports/controller.js rename to frontend/app/index/reports/controller.js diff --git a/app/index/reports/route.js b/frontend/app/index/reports/route.js similarity index 100% rename from app/index/reports/route.js rename to frontend/app/index/reports/route.js diff --git a/app/index/reports/template.hbs b/frontend/app/index/reports/template.hbs similarity index 100% rename from app/index/reports/template.hbs rename to frontend/app/index/reports/template.hbs diff --git a/app/index/route.js b/frontend/app/index/route.js similarity index 100% rename from app/index/route.js rename to frontend/app/index/route.js diff --git a/app/index/template.hbs b/frontend/app/index/template.hbs similarity index 100% rename from app/index/template.hbs rename to frontend/app/index/template.hbs diff --git a/app/initializers/responsive.js b/frontend/app/initializers/responsive.js similarity index 100% rename from app/initializers/responsive.js rename to frontend/app/initializers/responsive.js diff --git a/app/login/route.js b/frontend/app/login/route.js similarity index 100% rename from app/login/route.js rename to frontend/app/login/route.js diff --git a/app/login/template.hbs b/frontend/app/login/template.hbs similarity index 100% 
rename from app/login/template.hbs rename to frontend/app/login/template.hbs diff --git a/app/models/absence-balance.js b/frontend/app/models/absence-balance.js similarity index 100% rename from app/models/absence-balance.js rename to frontend/app/models/absence-balance.js diff --git a/app/models/absence-credit.js b/frontend/app/models/absence-credit.js similarity index 100% rename from app/models/absence-credit.js rename to frontend/app/models/absence-credit.js diff --git a/app/models/absence-type.js b/frontend/app/models/absence-type.js similarity index 100% rename from app/models/absence-type.js rename to frontend/app/models/absence-type.js diff --git a/app/models/absence.js b/frontend/app/models/absence.js similarity index 100% rename from app/models/absence.js rename to frontend/app/models/absence.js diff --git a/app/models/activity.js b/frontend/app/models/activity.js similarity index 100% rename from app/models/activity.js rename to frontend/app/models/activity.js diff --git a/app/models/attendance.js b/frontend/app/models/attendance.js similarity index 100% rename from app/models/attendance.js rename to frontend/app/models/attendance.js diff --git a/app/models/billing-type.js b/frontend/app/models/billing-type.js similarity index 100% rename from app/models/billing-type.js rename to frontend/app/models/billing-type.js diff --git a/app/models/cost-center.js b/frontend/app/models/cost-center.js similarity index 100% rename from app/models/cost-center.js rename to frontend/app/models/cost-center.js diff --git a/app/models/customer-assignee.js b/frontend/app/models/customer-assignee.js similarity index 100% rename from app/models/customer-assignee.js rename to frontend/app/models/customer-assignee.js diff --git a/app/models/customer-statistic.js b/frontend/app/models/customer-statistic.js similarity index 100% rename from app/models/customer-statistic.js rename to frontend/app/models/customer-statistic.js diff --git a/app/models/customer.js 
b/frontend/app/models/customer.js similarity index 100% rename from app/models/customer.js rename to frontend/app/models/customer.js diff --git a/app/models/employment.js b/frontend/app/models/employment.js similarity index 100% rename from app/models/employment.js rename to frontend/app/models/employment.js diff --git a/app/models/location.js b/frontend/app/models/location.js similarity index 100% rename from app/models/location.js rename to frontend/app/models/location.js diff --git a/app/models/month-statistic.js b/frontend/app/models/month-statistic.js similarity index 100% rename from app/models/month-statistic.js rename to frontend/app/models/month-statistic.js diff --git a/app/models/overtime-credit.js b/frontend/app/models/overtime-credit.js similarity index 100% rename from app/models/overtime-credit.js rename to frontend/app/models/overtime-credit.js diff --git a/app/models/project-assignee.js b/frontend/app/models/project-assignee.js similarity index 100% rename from app/models/project-assignee.js rename to frontend/app/models/project-assignee.js diff --git a/app/models/project-statistic.js b/frontend/app/models/project-statistic.js similarity index 100% rename from app/models/project-statistic.js rename to frontend/app/models/project-statistic.js diff --git a/app/models/project.js b/frontend/app/models/project.js similarity index 100% rename from app/models/project.js rename to frontend/app/models/project.js diff --git a/app/models/public-holiday.js b/frontend/app/models/public-holiday.js similarity index 100% rename from app/models/public-holiday.js rename to frontend/app/models/public-holiday.js diff --git a/app/models/report-intersection.js b/frontend/app/models/report-intersection.js similarity index 100% rename from app/models/report-intersection.js rename to frontend/app/models/report-intersection.js diff --git a/app/models/report.js b/frontend/app/models/report.js similarity index 100% rename from app/models/report.js rename to 
frontend/app/models/report.js diff --git a/app/models/task-assignee.js b/frontend/app/models/task-assignee.js similarity index 100% rename from app/models/task-assignee.js rename to frontend/app/models/task-assignee.js diff --git a/app/models/task-statistic.js b/frontend/app/models/task-statistic.js similarity index 100% rename from app/models/task-statistic.js rename to frontend/app/models/task-statistic.js diff --git a/app/models/task.js b/frontend/app/models/task.js similarity index 100% rename from app/models/task.js rename to frontend/app/models/task.js diff --git a/app/models/user-statistic.js b/frontend/app/models/user-statistic.js similarity index 100% rename from app/models/user-statistic.js rename to frontend/app/models/user-statistic.js diff --git a/app/models/user.js b/frontend/app/models/user.js similarity index 100% rename from app/models/user.js rename to frontend/app/models/user.js diff --git a/app/models/worktime-balance.js b/frontend/app/models/worktime-balance.js similarity index 100% rename from app/models/worktime-balance.js rename to frontend/app/models/worktime-balance.js diff --git a/app/models/year-statistic.js b/frontend/app/models/year-statistic.js similarity index 100% rename from app/models/year-statistic.js rename to frontend/app/models/year-statistic.js diff --git a/app/no-access/route.js b/frontend/app/no-access/route.js similarity index 100% rename from app/no-access/route.js rename to frontend/app/no-access/route.js diff --git a/app/no-access/template.hbs b/frontend/app/no-access/template.hbs similarity index 100% rename from app/no-access/template.hbs rename to frontend/app/no-access/template.hbs diff --git a/app/notfound/route.js b/frontend/app/notfound/route.js similarity index 100% rename from app/notfound/route.js rename to frontend/app/notfound/route.js diff --git a/app/notfound/template.hbs b/frontend/app/notfound/template.hbs similarity index 100% rename from app/notfound/template.hbs rename to 
frontend/app/notfound/template.hbs diff --git a/app/projects/controller.js b/frontend/app/projects/controller.js similarity index 100% rename from app/projects/controller.js rename to frontend/app/projects/controller.js diff --git a/app/projects/route.js b/frontend/app/projects/route.js similarity index 100% rename from app/projects/route.js rename to frontend/app/projects/route.js diff --git a/app/projects/template.hbs b/frontend/app/projects/template.hbs similarity index 100% rename from app/projects/template.hbs rename to frontend/app/projects/template.hbs diff --git a/app/protected/controller.js b/frontend/app/protected/controller.js similarity index 100% rename from app/protected/controller.js rename to frontend/app/protected/controller.js diff --git a/app/protected/route.js b/frontend/app/protected/route.js similarity index 100% rename from app/protected/route.js rename to frontend/app/protected/route.js diff --git a/app/protected/template.hbs b/frontend/app/protected/template.hbs similarity index 100% rename from app/protected/template.hbs rename to frontend/app/protected/template.hbs diff --git a/app/router.js b/frontend/app/router.js similarity index 100% rename from app/router.js rename to frontend/app/router.js diff --git a/app/serializers/application.js b/frontend/app/serializers/application.js similarity index 100% rename from app/serializers/application.js rename to frontend/app/serializers/application.js diff --git a/app/serializers/attendance.js b/frontend/app/serializers/attendance.js similarity index 100% rename from app/serializers/attendance.js rename to frontend/app/serializers/attendance.js diff --git a/app/serializers/employment.js b/frontend/app/serializers/employment.js similarity index 100% rename from app/serializers/employment.js rename to frontend/app/serializers/employment.js diff --git a/app/services/autostart-tour.js b/frontend/app/services/autostart-tour.js similarity index 100% rename from app/services/autostart-tour.js rename to 
frontend/app/services/autostart-tour.js diff --git a/app/services/fetch.js b/frontend/app/services/fetch.js similarity index 100% rename from app/services/fetch.js rename to frontend/app/services/fetch.js diff --git a/app/services/metadata-fetcher.js b/frontend/app/services/metadata-fetcher.js similarity index 100% rename from app/services/metadata-fetcher.js rename to frontend/app/services/metadata-fetcher.js diff --git a/app/services/rejected-reports.js b/frontend/app/services/rejected-reports.js similarity index 100% rename from app/services/rejected-reports.js rename to frontend/app/services/rejected-reports.js diff --git a/app/services/tour.js b/frontend/app/services/tour.js similarity index 100% rename from app/services/tour.js rename to frontend/app/services/tour.js diff --git a/app/services/tracking.js b/frontend/app/services/tracking.js similarity index 100% rename from app/services/tracking.js rename to frontend/app/services/tracking.js diff --git a/app/services/unverified-reports.js b/frontend/app/services/unverified-reports.js similarity index 100% rename from app/services/unverified-reports.js rename to frontend/app/services/unverified-reports.js diff --git a/app/sso-login/route.js b/frontend/app/sso-login/route.js similarity index 100% rename from app/sso-login/route.js rename to frontend/app/sso-login/route.js diff --git a/app/statistics/controller.js b/frontend/app/statistics/controller.js similarity index 100% rename from app/statistics/controller.js rename to frontend/app/statistics/controller.js diff --git a/app/statistics/route.js b/frontend/app/statistics/route.js similarity index 100% rename from app/statistics/route.js rename to frontend/app/statistics/route.js diff --git a/app/statistics/template.hbs b/frontend/app/statistics/template.hbs similarity index 100% rename from app/statistics/template.hbs rename to frontend/app/statistics/template.hbs diff --git a/app/styles/activities.scss b/frontend/app/styles/activities.scss similarity index 
100% rename from app/styles/activities.scss rename to frontend/app/styles/activities.scss diff --git a/app/styles/adcssy.scss b/frontend/app/styles/adcssy.scss similarity index 100% rename from app/styles/adcssy.scss rename to frontend/app/styles/adcssy.scss diff --git a/app/styles/analysis.scss b/frontend/app/styles/analysis.scss similarity index 100% rename from app/styles/analysis.scss rename to frontend/app/styles/analysis.scss diff --git a/app/styles/app.scss b/frontend/app/styles/app.scss similarity index 100% rename from app/styles/app.scss rename to frontend/app/styles/app.scss diff --git a/app/styles/attendances.scss b/frontend/app/styles/attendances.scss similarity index 100% rename from app/styles/attendances.scss rename to frontend/app/styles/attendances.scss diff --git a/app/styles/badge.scss b/frontend/app/styles/badge.scss similarity index 100% rename from app/styles/badge.scss rename to frontend/app/styles/badge.scss diff --git a/app/styles/components/attendance-slider.scss b/frontend/app/styles/components/attendance-slider.scss similarity index 100% rename from app/styles/components/attendance-slider.scss rename to frontend/app/styles/components/attendance-slider.scss diff --git a/app/styles/components/balance-donut.scss b/frontend/app/styles/components/balance-donut.scss similarity index 100% rename from app/styles/components/balance-donut.scss rename to frontend/app/styles/components/balance-donut.scss diff --git a/app/styles/components/date-buttons.scss b/frontend/app/styles/components/date-buttons.scss similarity index 100% rename from app/styles/components/date-buttons.scss rename to frontend/app/styles/components/date-buttons.scss diff --git a/app/styles/components/date-navigation.scss b/frontend/app/styles/components/date-navigation.scss similarity index 100% rename from app/styles/components/date-navigation.scss rename to frontend/app/styles/components/date-navigation.scss diff --git a/app/styles/components/filter-sidebar--group.scss 
b/frontend/app/styles/components/filter-sidebar--group.scss similarity index 100% rename from app/styles/components/filter-sidebar--group.scss rename to frontend/app/styles/components/filter-sidebar--group.scss diff --git a/app/styles/components/filter-sidebar--label.scss b/frontend/app/styles/components/filter-sidebar--label.scss similarity index 100% rename from app/styles/components/filter-sidebar--label.scss rename to frontend/app/styles/components/filter-sidebar--label.scss diff --git a/app/styles/components/loading-icon.scss b/frontend/app/styles/components/loading-icon.scss similarity index 100% rename from app/styles/components/loading-icon.scss rename to frontend/app/styles/components/loading-icon.scss diff --git a/app/styles/components/magic-link-btn.scss b/frontend/app/styles/components/magic-link-btn.scss similarity index 100% rename from app/styles/components/magic-link-btn.scss rename to frontend/app/styles/components/magic-link-btn.scss diff --git a/app/styles/components/nav-top.scss b/frontend/app/styles/components/nav-top.scss similarity index 100% rename from app/styles/components/nav-top.scss rename to frontend/app/styles/components/nav-top.scss diff --git a/app/styles/components/progress-tooltip.scss b/frontend/app/styles/components/progress-tooltip.scss similarity index 100% rename from app/styles/components/progress-tooltip.scss rename to frontend/app/styles/components/progress-tooltip.scss diff --git a/app/styles/components/record-button.scss b/frontend/app/styles/components/record-button.scss similarity index 100% rename from app/styles/components/record-button.scss rename to frontend/app/styles/components/record-button.scss diff --git a/app/styles/components/scroll-container.scss b/frontend/app/styles/components/scroll-container.scss similarity index 100% rename from app/styles/components/scroll-container.scss rename to frontend/app/styles/components/scroll-container.scss diff --git a/app/styles/components/sort-header.scss 
b/frontend/app/styles/components/sort-header.scss similarity index 100% rename from app/styles/components/sort-header.scss rename to frontend/app/styles/components/sort-header.scss diff --git a/app/styles/components/statistic-list-bar.scss b/frontend/app/styles/components/statistic-list-bar.scss similarity index 100% rename from app/styles/components/statistic-list-bar.scss rename to frontend/app/styles/components/statistic-list-bar.scss diff --git a/app/styles/components/sy-calendar.scss b/frontend/app/styles/components/sy-calendar.scss similarity index 100% rename from app/styles/components/sy-calendar.scss rename to frontend/app/styles/components/sy-calendar.scss diff --git a/app/styles/components/sy-checkbox.scss b/frontend/app/styles/components/sy-checkbox.scss similarity index 100% rename from app/styles/components/sy-checkbox.scss rename to frontend/app/styles/components/sy-checkbox.scss diff --git a/app/styles/components/sy-datepicker.scss b/frontend/app/styles/components/sy-datepicker.scss similarity index 100% rename from app/styles/components/sy-datepicker.scss rename to frontend/app/styles/components/sy-datepicker.scss diff --git a/app/styles/components/sy-durationpicker-day.scss b/frontend/app/styles/components/sy-durationpicker-day.scss similarity index 100% rename from app/styles/components/sy-durationpicker-day.scss rename to frontend/app/styles/components/sy-durationpicker-day.scss diff --git a/app/styles/components/sy-modal--footer.scss b/frontend/app/styles/components/sy-modal--footer.scss similarity index 100% rename from app/styles/components/sy-modal--footer.scss rename to frontend/app/styles/components/sy-modal--footer.scss diff --git a/app/styles/components/sy-modal--overlay.scss b/frontend/app/styles/components/sy-modal--overlay.scss similarity index 100% rename from app/styles/components/sy-modal--overlay.scss rename to frontend/app/styles/components/sy-modal--overlay.scss diff --git a/app/styles/components/sy-toggle.scss 
b/frontend/app/styles/components/sy-toggle.scss similarity index 100% rename from app/styles/components/sy-toggle.scss rename to frontend/app/styles/components/sy-toggle.scss diff --git a/app/styles/components/timed-clock.scss b/frontend/app/styles/components/timed-clock.scss similarity index 100% rename from app/styles/components/timed-clock.scss rename to frontend/app/styles/components/timed-clock.scss diff --git a/app/styles/components/tracking-bar.scss b/frontend/app/styles/components/tracking-bar.scss similarity index 100% rename from app/styles/components/tracking-bar.scss rename to frontend/app/styles/components/tracking-bar.scss diff --git a/app/styles/components/weekly-overview-benchmark.scss b/frontend/app/styles/components/weekly-overview-benchmark.scss similarity index 100% rename from app/styles/components/weekly-overview-benchmark.scss rename to frontend/app/styles/components/weekly-overview-benchmark.scss diff --git a/app/styles/components/weekly-overview-day.scss b/frontend/app/styles/components/weekly-overview-day.scss similarity index 100% rename from app/styles/components/weekly-overview-day.scss rename to frontend/app/styles/components/weekly-overview-day.scss diff --git a/app/styles/components/weekly-overview.scss b/frontend/app/styles/components/weekly-overview.scss similarity index 100% rename from app/styles/components/weekly-overview.scss rename to frontend/app/styles/components/weekly-overview.scss diff --git a/app/styles/components/welcome-modal.scss b/frontend/app/styles/components/welcome-modal.scss similarity index 100% rename from app/styles/components/welcome-modal.scss rename to frontend/app/styles/components/welcome-modal.scss diff --git a/app/styles/ember-power-select-custom.scss b/frontend/app/styles/ember-power-select-custom.scss similarity index 100% rename from app/styles/ember-power-select-custom.scss rename to frontend/app/styles/ember-power-select-custom.scss diff --git a/app/styles/filter-sidebar.scss 
b/frontend/app/styles/filter-sidebar.scss similarity index 100% rename from app/styles/filter-sidebar.scss rename to frontend/app/styles/filter-sidebar.scss diff --git a/app/styles/form-list.scss b/frontend/app/styles/form-list.scss similarity index 100% rename from app/styles/form-list.scss rename to frontend/app/styles/form-list.scss diff --git a/app/styles/loader.scss b/frontend/app/styles/loader.scss similarity index 100% rename from app/styles/loader.scss rename to frontend/app/styles/loader.scss diff --git a/app/styles/login.scss b/frontend/app/styles/login.scss similarity index 100% rename from app/styles/login.scss rename to frontend/app/styles/login.scss diff --git a/app/styles/projects.scss b/frontend/app/styles/projects.scss similarity index 100% rename from app/styles/projects.scss rename to frontend/app/styles/projects.scss diff --git a/app/styles/reports.scss b/frontend/app/styles/reports.scss similarity index 100% rename from app/styles/reports.scss rename to frontend/app/styles/reports.scss diff --git a/app/styles/statistics.scss b/frontend/app/styles/statistics.scss similarity index 100% rename from app/styles/statistics.scss rename to frontend/app/styles/statistics.scss diff --git a/app/styles/toolbar.scss b/frontend/app/styles/toolbar.scss similarity index 100% rename from app/styles/toolbar.scss rename to frontend/app/styles/toolbar.scss diff --git a/app/styles/tour.scss b/frontend/app/styles/tour.scss similarity index 100% rename from app/styles/tour.scss rename to frontend/app/styles/tour.scss diff --git a/app/styles/users-navigation.scss b/frontend/app/styles/users-navigation.scss similarity index 100% rename from app/styles/users-navigation.scss rename to frontend/app/styles/users-navigation.scss diff --git a/app/styles/users.scss b/frontend/app/styles/users.scss similarity index 100% rename from app/styles/users.scss rename to frontend/app/styles/users.scss diff --git a/app/styles/variables.scss b/frontend/app/styles/variables.scss 
similarity index 100% rename from app/styles/variables.scss rename to frontend/app/styles/variables.scss diff --git a/app/tours/index.js b/frontend/app/tours/index.js similarity index 100% rename from app/tours/index.js rename to frontend/app/tours/index.js diff --git a/app/tours/index/activities.js b/frontend/app/tours/index/activities.js similarity index 100% rename from app/tours/index/activities.js rename to frontend/app/tours/index/activities.js diff --git a/app/tours/index/attendances.js b/frontend/app/tours/index/attendances.js similarity index 100% rename from app/tours/index/attendances.js rename to frontend/app/tours/index/attendances.js diff --git a/app/tours/index/reports.js b/frontend/app/tours/index/reports.js similarity index 100% rename from app/tours/index/reports.js rename to frontend/app/tours/index/reports.js diff --git a/app/transforms/django-date.js b/frontend/app/transforms/django-date.js similarity index 100% rename from app/transforms/django-date.js rename to frontend/app/transforms/django-date.js diff --git a/app/transforms/django-datetime.js b/frontend/app/transforms/django-datetime.js similarity index 100% rename from app/transforms/django-datetime.js rename to frontend/app/transforms/django-datetime.js diff --git a/app/transforms/django-duration.js b/frontend/app/transforms/django-duration.js similarity index 100% rename from app/transforms/django-duration.js rename to frontend/app/transforms/django-duration.js diff --git a/app/transforms/django-time.js b/frontend/app/transforms/django-time.js similarity index 100% rename from app/transforms/django-time.js rename to frontend/app/transforms/django-time.js diff --git a/app/transforms/django-workdays.js b/frontend/app/transforms/django-workdays.js similarity index 100% rename from app/transforms/django-workdays.js rename to frontend/app/transforms/django-workdays.js diff --git a/app/transforms/moment.js b/frontend/app/transforms/moment.js similarity index 100% rename from 
app/transforms/moment.js rename to frontend/app/transforms/moment.js diff --git a/app/users/edit/controller.js b/frontend/app/users/edit/controller.js similarity index 100% rename from app/users/edit/controller.js rename to frontend/app/users/edit/controller.js diff --git a/app/users/edit/credits/absence-credits/edit/controller.js b/frontend/app/users/edit/credits/absence-credits/edit/controller.js similarity index 100% rename from app/users/edit/credits/absence-credits/edit/controller.js rename to frontend/app/users/edit/credits/absence-credits/edit/controller.js diff --git a/app/users/edit/credits/absence-credits/edit/route.js b/frontend/app/users/edit/credits/absence-credits/edit/route.js similarity index 100% rename from app/users/edit/credits/absence-credits/edit/route.js rename to frontend/app/users/edit/credits/absence-credits/edit/route.js diff --git a/app/users/edit/credits/absence-credits/edit/template.hbs b/frontend/app/users/edit/credits/absence-credits/edit/template.hbs similarity index 100% rename from app/users/edit/credits/absence-credits/edit/template.hbs rename to frontend/app/users/edit/credits/absence-credits/edit/template.hbs diff --git a/app/users/edit/credits/absence-credits/new/route.js b/frontend/app/users/edit/credits/absence-credits/new/route.js similarity index 100% rename from app/users/edit/credits/absence-credits/new/route.js rename to frontend/app/users/edit/credits/absence-credits/new/route.js diff --git a/app/users/edit/credits/index/controller.js b/frontend/app/users/edit/credits/index/controller.js similarity index 100% rename from app/users/edit/credits/index/controller.js rename to frontend/app/users/edit/credits/index/controller.js diff --git a/app/users/edit/credits/index/route.js b/frontend/app/users/edit/credits/index/route.js similarity index 100% rename from app/users/edit/credits/index/route.js rename to frontend/app/users/edit/credits/index/route.js diff --git a/app/users/edit/credits/index/template.hbs 
b/frontend/app/users/edit/credits/index/template.hbs similarity index 100% rename from app/users/edit/credits/index/template.hbs rename to frontend/app/users/edit/credits/index/template.hbs diff --git a/app/users/edit/credits/overtime-credits/edit/controller.js b/frontend/app/users/edit/credits/overtime-credits/edit/controller.js similarity index 100% rename from app/users/edit/credits/overtime-credits/edit/controller.js rename to frontend/app/users/edit/credits/overtime-credits/edit/controller.js diff --git a/app/users/edit/credits/overtime-credits/edit/route.js b/frontend/app/users/edit/credits/overtime-credits/edit/route.js similarity index 100% rename from app/users/edit/credits/overtime-credits/edit/route.js rename to frontend/app/users/edit/credits/overtime-credits/edit/route.js diff --git a/app/users/edit/credits/overtime-credits/edit/template.hbs b/frontend/app/users/edit/credits/overtime-credits/edit/template.hbs similarity index 100% rename from app/users/edit/credits/overtime-credits/edit/template.hbs rename to frontend/app/users/edit/credits/overtime-credits/edit/template.hbs diff --git a/app/users/edit/credits/overtime-credits/new/route.js b/frontend/app/users/edit/credits/overtime-credits/new/route.js similarity index 100% rename from app/users/edit/credits/overtime-credits/new/route.js rename to frontend/app/users/edit/credits/overtime-credits/new/route.js diff --git a/app/users/edit/credits/route.js b/frontend/app/users/edit/credits/route.js similarity index 100% rename from app/users/edit/credits/route.js rename to frontend/app/users/edit/credits/route.js diff --git a/app/users/edit/credits/template.hbs b/frontend/app/users/edit/credits/template.hbs similarity index 100% rename from app/users/edit/credits/template.hbs rename to frontend/app/users/edit/credits/template.hbs diff --git a/app/users/edit/index/controller.js b/frontend/app/users/edit/index/controller.js similarity index 100% rename from app/users/edit/index/controller.js rename to 
frontend/app/users/edit/index/controller.js diff --git a/app/users/edit/index/route.js b/frontend/app/users/edit/index/route.js similarity index 100% rename from app/users/edit/index/route.js rename to frontend/app/users/edit/index/route.js diff --git a/app/users/edit/index/template.hbs b/frontend/app/users/edit/index/template.hbs similarity index 100% rename from app/users/edit/index/template.hbs rename to frontend/app/users/edit/index/template.hbs diff --git a/app/users/edit/responsibilities/controller.js b/frontend/app/users/edit/responsibilities/controller.js similarity index 100% rename from app/users/edit/responsibilities/controller.js rename to frontend/app/users/edit/responsibilities/controller.js diff --git a/app/users/edit/responsibilities/route.js b/frontend/app/users/edit/responsibilities/route.js similarity index 100% rename from app/users/edit/responsibilities/route.js rename to frontend/app/users/edit/responsibilities/route.js diff --git a/app/users/edit/responsibilities/template.hbs b/frontend/app/users/edit/responsibilities/template.hbs similarity index 100% rename from app/users/edit/responsibilities/template.hbs rename to frontend/app/users/edit/responsibilities/template.hbs diff --git a/app/users/edit/route.js b/frontend/app/users/edit/route.js similarity index 100% rename from app/users/edit/route.js rename to frontend/app/users/edit/route.js diff --git a/app/users/edit/template.hbs b/frontend/app/users/edit/template.hbs similarity index 100% rename from app/users/edit/template.hbs rename to frontend/app/users/edit/template.hbs diff --git a/app/users/index/controller.js b/frontend/app/users/index/controller.js similarity index 100% rename from app/users/index/controller.js rename to frontend/app/users/index/controller.js diff --git a/app/users/index/route.js b/frontend/app/users/index/route.js similarity index 100% rename from app/users/index/route.js rename to frontend/app/users/index/route.js diff --git a/app/users/index/template.hbs 
b/frontend/app/users/index/template.hbs similarity index 100% rename from app/users/index/template.hbs rename to frontend/app/users/index/template.hbs diff --git a/app/users/route.js b/frontend/app/users/route.js similarity index 100% rename from app/users/route.js rename to frontend/app/users/route.js diff --git a/app/users/template.hbs b/frontend/app/users/template.hbs similarity index 100% rename from app/users/template.hbs rename to frontend/app/users/template.hbs diff --git a/app/utils/format-duration.js b/frontend/app/utils/format-duration.js similarity index 100% rename from app/utils/format-duration.js rename to frontend/app/utils/format-duration.js diff --git a/app/utils/humanize-duration.js b/frontend/app/utils/humanize-duration.js similarity index 100% rename from app/utils/humanize-duration.js rename to frontend/app/utils/humanize-duration.js diff --git a/app/utils/parse-django-duration.js b/frontend/app/utils/parse-django-duration.js similarity index 100% rename from app/utils/parse-django-duration.js rename to frontend/app/utils/parse-django-duration.js diff --git a/app/utils/query-params.js b/frontend/app/utils/query-params.js similarity index 100% rename from app/utils/query-params.js rename to frontend/app/utils/query-params.js diff --git a/app/utils/serialize-moment.js b/frontend/app/utils/serialize-moment.js similarity index 100% rename from app/utils/serialize-moment.js rename to frontend/app/utils/serialize-moment.js diff --git a/app/utils/url.js b/frontend/app/utils/url.js similarity index 100% rename from app/utils/url.js rename to frontend/app/utils/url.js diff --git a/app/validations/absence-credit.js b/frontend/app/validations/absence-credit.js similarity index 100% rename from app/validations/absence-credit.js rename to frontend/app/validations/absence-credit.js diff --git a/app/validations/absence.js b/frontend/app/validations/absence.js similarity index 100% rename from app/validations/absence.js rename to 
frontend/app/validations/absence.js diff --git a/app/validations/activity.js b/frontend/app/validations/activity.js similarity index 100% rename from app/validations/activity.js rename to frontend/app/validations/activity.js diff --git a/app/validations/attendance.js b/frontend/app/validations/attendance.js similarity index 100% rename from app/validations/attendance.js rename to frontend/app/validations/attendance.js diff --git a/app/validations/intersection.js b/frontend/app/validations/intersection.js similarity index 100% rename from app/validations/intersection.js rename to frontend/app/validations/intersection.js diff --git a/app/validations/multiple-absence.js b/frontend/app/validations/multiple-absence.js similarity index 100% rename from app/validations/multiple-absence.js rename to frontend/app/validations/multiple-absence.js diff --git a/app/validations/overtime-credit.js b/frontend/app/validations/overtime-credit.js similarity index 100% rename from app/validations/overtime-credit.js rename to frontend/app/validations/overtime-credit.js diff --git a/app/validations/project.js b/frontend/app/validations/project.js similarity index 100% rename from app/validations/project.js rename to frontend/app/validations/project.js diff --git a/app/validations/report.js b/frontend/app/validations/report.js similarity index 100% rename from app/validations/report.js rename to frontend/app/validations/report.js diff --git a/app/validations/task.js b/frontend/app/validations/task.js similarity index 100% rename from app/validations/task.js rename to frontend/app/validations/task.js diff --git a/app/validators/intersection-task.js b/frontend/app/validators/intersection-task.js similarity index 100% rename from app/validators/intersection-task.js rename to frontend/app/validators/intersection-task.js diff --git a/app/validators/moment.js b/frontend/app/validators/moment.js similarity index 100% rename from app/validators/moment.js rename to 
frontend/app/validators/moment.js diff --git a/app/validators/null-or-not-blank.js b/frontend/app/validators/null-or-not-blank.js similarity index 100% rename from app/validators/null-or-not-blank.js rename to frontend/app/validators/null-or-not-blank.js diff --git a/config/coverage.js b/frontend/config/coverage.js similarity index 100% rename from config/coverage.js rename to frontend/config/coverage.js diff --git a/config/dependency-lint.js b/frontend/config/dependency-lint.js similarity index 100% rename from config/dependency-lint.js rename to frontend/config/dependency-lint.js diff --git a/config/deprecation-workflow.js b/frontend/config/deprecation-workflow.js similarity index 100% rename from config/deprecation-workflow.js rename to frontend/config/deprecation-workflow.js diff --git a/config/ember-cli-update.json b/frontend/config/ember-cli-update.json similarity index 100% rename from config/ember-cli-update.json rename to frontend/config/ember-cli-update.json diff --git a/config/environment.js b/frontend/config/environment.js similarity index 100% rename from config/environment.js rename to frontend/config/environment.js diff --git a/config/icons.js b/frontend/config/icons.js similarity index 100% rename from config/icons.js rename to frontend/config/icons.js diff --git a/config/optional-features.json b/frontend/config/optional-features.json similarity index 100% rename from config/optional-features.json rename to frontend/config/optional-features.json diff --git a/config/targets.js b/frontend/config/targets.js similarity index 100% rename from config/targets.js rename to frontend/config/targets.js diff --git a/contrib/nginx.conf b/frontend/contrib/nginx.conf similarity index 100% rename from contrib/nginx.conf rename to frontend/contrib/nginx.conf diff --git a/docker-entrypoint.sh b/frontend/docker-entrypoint.sh similarity index 100% rename from docker-entrypoint.sh rename to frontend/docker-entrypoint.sh diff --git a/ember-cli-build.js 
b/frontend/ember-cli-build.js similarity index 100% rename from ember-cli-build.js rename to frontend/ember-cli-build.js diff --git a/mirage/config.js b/frontend/mirage/config.js similarity index 100% rename from mirage/config.js rename to frontend/mirage/config.js diff --git a/mirage/factories/absence-balance.js b/frontend/mirage/factories/absence-balance.js similarity index 100% rename from mirage/factories/absence-balance.js rename to frontend/mirage/factories/absence-balance.js diff --git a/mirage/factories/absence-credit.js b/frontend/mirage/factories/absence-credit.js similarity index 100% rename from mirage/factories/absence-credit.js rename to frontend/mirage/factories/absence-credit.js diff --git a/mirage/factories/absence-type.js b/frontend/mirage/factories/absence-type.js similarity index 100% rename from mirage/factories/absence-type.js rename to frontend/mirage/factories/absence-type.js diff --git a/mirage/factories/absence.js b/frontend/mirage/factories/absence.js similarity index 100% rename from mirage/factories/absence.js rename to frontend/mirage/factories/absence.js diff --git a/mirage/factories/activity.js b/frontend/mirage/factories/activity.js similarity index 100% rename from mirage/factories/activity.js rename to frontend/mirage/factories/activity.js diff --git a/mirage/factories/attendance.js b/frontend/mirage/factories/attendance.js similarity index 100% rename from mirage/factories/attendance.js rename to frontend/mirage/factories/attendance.js diff --git a/mirage/factories/billing-type.js b/frontend/mirage/factories/billing-type.js similarity index 100% rename from mirage/factories/billing-type.js rename to frontend/mirage/factories/billing-type.js diff --git a/mirage/factories/cost-center.js b/frontend/mirage/factories/cost-center.js similarity index 100% rename from mirage/factories/cost-center.js rename to frontend/mirage/factories/cost-center.js diff --git a/mirage/factories/customer-statistic.js 
b/frontend/mirage/factories/customer-statistic.js similarity index 100% rename from mirage/factories/customer-statistic.js rename to frontend/mirage/factories/customer-statistic.js diff --git a/mirage/factories/customer.js b/frontend/mirage/factories/customer.js similarity index 100% rename from mirage/factories/customer.js rename to frontend/mirage/factories/customer.js diff --git a/mirage/factories/employment.js b/frontend/mirage/factories/employment.js similarity index 100% rename from mirage/factories/employment.js rename to frontend/mirage/factories/employment.js diff --git a/mirage/factories/location.js b/frontend/mirage/factories/location.js similarity index 100% rename from mirage/factories/location.js rename to frontend/mirage/factories/location.js diff --git a/mirage/factories/month-statistic.js b/frontend/mirage/factories/month-statistic.js similarity index 100% rename from mirage/factories/month-statistic.js rename to frontend/mirage/factories/month-statistic.js diff --git a/mirage/factories/overtime-credit.js b/frontend/mirage/factories/overtime-credit.js similarity index 100% rename from mirage/factories/overtime-credit.js rename to frontend/mirage/factories/overtime-credit.js diff --git a/mirage/factories/project-assignee.js b/frontend/mirage/factories/project-assignee.js similarity index 100% rename from mirage/factories/project-assignee.js rename to frontend/mirage/factories/project-assignee.js diff --git a/mirage/factories/project-statistic.js b/frontend/mirage/factories/project-statistic.js similarity index 100% rename from mirage/factories/project-statistic.js rename to frontend/mirage/factories/project-statistic.js diff --git a/mirage/factories/project.js b/frontend/mirage/factories/project.js similarity index 100% rename from mirage/factories/project.js rename to frontend/mirage/factories/project.js diff --git a/mirage/factories/public-holiday.js b/frontend/mirage/factories/public-holiday.js similarity index 100% rename from 
mirage/factories/public-holiday.js rename to frontend/mirage/factories/public-holiday.js diff --git a/mirage/factories/report-intersection.js b/frontend/mirage/factories/report-intersection.js similarity index 100% rename from mirage/factories/report-intersection.js rename to frontend/mirage/factories/report-intersection.js diff --git a/mirage/factories/report.js b/frontend/mirage/factories/report.js similarity index 100% rename from mirage/factories/report.js rename to frontend/mirage/factories/report.js diff --git a/mirage/factories/task-statistic.js b/frontend/mirage/factories/task-statistic.js similarity index 100% rename from mirage/factories/task-statistic.js rename to frontend/mirage/factories/task-statistic.js diff --git a/mirage/factories/task.js b/frontend/mirage/factories/task.js similarity index 100% rename from mirage/factories/task.js rename to frontend/mirage/factories/task.js diff --git a/mirage/factories/user-statistic.js b/frontend/mirage/factories/user-statistic.js similarity index 100% rename from mirage/factories/user-statistic.js rename to frontend/mirage/factories/user-statistic.js diff --git a/mirage/factories/user.js b/frontend/mirage/factories/user.js similarity index 100% rename from mirage/factories/user.js rename to frontend/mirage/factories/user.js diff --git a/mirage/factories/worktime-balance.js b/frontend/mirage/factories/worktime-balance.js similarity index 100% rename from mirage/factories/worktime-balance.js rename to frontend/mirage/factories/worktime-balance.js diff --git a/mirage/factories/year-statistic.js b/frontend/mirage/factories/year-statistic.js similarity index 100% rename from mirage/factories/year-statistic.js rename to frontend/mirage/factories/year-statistic.js diff --git a/mirage/fixtures/absence-types.js b/frontend/mirage/fixtures/absence-types.js similarity index 100% rename from mirage/fixtures/absence-types.js rename to frontend/mirage/fixtures/absence-types.js diff --git a/mirage/helpers/duration.js 
b/frontend/mirage/helpers/duration.js similarity index 100% rename from mirage/helpers/duration.js rename to frontend/mirage/helpers/duration.js diff --git a/mirage/scenarios/default.js b/frontend/mirage/scenarios/default.js similarity index 100% rename from mirage/scenarios/default.js rename to frontend/mirage/scenarios/default.js diff --git a/mirage/serializers/application.js b/frontend/mirage/serializers/application.js similarity index 100% rename from mirage/serializers/application.js rename to frontend/mirage/serializers/application.js diff --git a/package.json b/frontend/package.json similarity index 98% rename from package.json rename to frontend/package.json index cf3f08ee0..3ff5102ba 100644 --- a/package.json +++ b/frontend/package.json @@ -17,7 +17,7 @@ "lint:hbs:fix": "ember-template-lint . --fix", "lint:js": "eslint --config .eslintrc.js .", "lint:js:fix": "eslint --config .eslintrc.js . --fix", - "prepare": "husky install", + "prepare": "cd .. && husky install frontend/.husky", "preinstall": "npx only-allow pnpm", "start": "ember server --proxy http://localhost:8000", "test": "npm-run-all test:*", diff --git a/pnpm-lock.yaml b/frontend/pnpm-lock.yaml similarity index 100% rename from pnpm-lock.yaml rename to frontend/pnpm-lock.yaml diff --git a/public/assets/favicon-16x16.png b/frontend/public/assets/favicon-16x16.png similarity index 100% rename from public/assets/favicon-16x16.png rename to frontend/public/assets/favicon-16x16.png diff --git a/public/assets/favicon-32x32.png b/frontend/public/assets/favicon-32x32.png similarity index 100% rename from public/assets/favicon-32x32.png rename to frontend/public/assets/favicon-32x32.png diff --git a/public/assets/favicon.ico b/frontend/public/assets/favicon.ico similarity index 100% rename from public/assets/favicon.ico rename to frontend/public/assets/favicon.ico diff --git a/public/assets/logo.png b/frontend/public/assets/logo.png similarity index 100% rename from public/assets/logo.png rename to 
frontend/public/assets/logo.png diff --git a/public/assets/logo.svg b/frontend/public/assets/logo.svg similarity index 100% rename from public/assets/logo.svg rename to frontend/public/assets/logo.svg diff --git a/public/assets/logo_text.png b/frontend/public/assets/logo_text.png similarity index 100% rename from public/assets/logo_text.png rename to frontend/public/assets/logo_text.png diff --git a/public/crossdomain.xml b/frontend/public/crossdomain.xml similarity index 100% rename from public/crossdomain.xml rename to frontend/public/crossdomain.xml diff --git a/public/robots.txt b/frontend/public/robots.txt similarity index 100% rename from public/robots.txt rename to frontend/public/robots.txt diff --git a/renovate.json b/frontend/renovate.json similarity index 100% rename from renovate.json rename to frontend/renovate.json diff --git a/testem.js b/frontend/testem.js similarity index 100% rename from testem.js rename to frontend/testem.js diff --git a/tests/.eslintrc.js b/frontend/tests/.eslintrc.js similarity index 100% rename from tests/.eslintrc.js rename to frontend/tests/.eslintrc.js diff --git a/tests/acceptance/analysis-edit-test.js b/frontend/tests/acceptance/analysis-edit-test.js similarity index 100% rename from tests/acceptance/analysis-edit-test.js rename to frontend/tests/acceptance/analysis-edit-test.js diff --git a/tests/acceptance/analysis-test.js b/frontend/tests/acceptance/analysis-test.js similarity index 100% rename from tests/acceptance/analysis-test.js rename to frontend/tests/acceptance/analysis-test.js diff --git a/tests/acceptance/auth-test.js b/frontend/tests/acceptance/auth-test.js similarity index 100% rename from tests/acceptance/auth-test.js rename to frontend/tests/acceptance/auth-test.js diff --git a/tests/acceptance/external-employee-test.js b/frontend/tests/acceptance/external-employee-test.js similarity index 100% rename from tests/acceptance/external-employee-test.js rename to 
frontend/tests/acceptance/external-employee-test.js diff --git a/tests/acceptance/index-activities-edit-test.js b/frontend/tests/acceptance/index-activities-edit-test.js similarity index 100% rename from tests/acceptance/index-activities-edit-test.js rename to frontend/tests/acceptance/index-activities-edit-test.js diff --git a/tests/acceptance/index-activities-test.js b/frontend/tests/acceptance/index-activities-test.js similarity index 100% rename from tests/acceptance/index-activities-test.js rename to frontend/tests/acceptance/index-activities-test.js diff --git a/tests/acceptance/index-attendances-test.js b/frontend/tests/acceptance/index-attendances-test.js similarity index 100% rename from tests/acceptance/index-attendances-test.js rename to frontend/tests/acceptance/index-attendances-test.js diff --git a/tests/acceptance/index-reports-test.js b/frontend/tests/acceptance/index-reports-test.js similarity index 100% rename from tests/acceptance/index-reports-test.js rename to frontend/tests/acceptance/index-reports-test.js diff --git a/tests/acceptance/index-test.js b/frontend/tests/acceptance/index-test.js similarity index 100% rename from tests/acceptance/index-test.js rename to frontend/tests/acceptance/index-test.js diff --git a/tests/acceptance/magic-link-test.js b/frontend/tests/acceptance/magic-link-test.js similarity index 100% rename from tests/acceptance/magic-link-test.js rename to frontend/tests/acceptance/magic-link-test.js diff --git a/tests/acceptance/notfound-test.js b/frontend/tests/acceptance/notfound-test.js similarity index 100% rename from tests/acceptance/notfound-test.js rename to frontend/tests/acceptance/notfound-test.js diff --git a/tests/acceptance/project-test.js b/frontend/tests/acceptance/project-test.js similarity index 100% rename from tests/acceptance/project-test.js rename to frontend/tests/acceptance/project-test.js diff --git a/tests/acceptance/statistics-test.js b/frontend/tests/acceptance/statistics-test.js similarity 
index 100% rename from tests/acceptance/statistics-test.js rename to frontend/tests/acceptance/statistics-test.js diff --git a/tests/acceptance/tour-test.js b/frontend/tests/acceptance/tour-test.js similarity index 100% rename from tests/acceptance/tour-test.js rename to frontend/tests/acceptance/tour-test.js diff --git a/tests/acceptance/users-edit-credits-absence-credit-test.js b/frontend/tests/acceptance/users-edit-credits-absence-credit-test.js similarity index 100% rename from tests/acceptance/users-edit-credits-absence-credit-test.js rename to frontend/tests/acceptance/users-edit-credits-absence-credit-test.js diff --git a/tests/acceptance/users-edit-credits-overtime-credit-test.js b/frontend/tests/acceptance/users-edit-credits-overtime-credit-test.js similarity index 100% rename from tests/acceptance/users-edit-credits-overtime-credit-test.js rename to frontend/tests/acceptance/users-edit-credits-overtime-credit-test.js diff --git a/tests/acceptance/users-edit-credits-test.js b/frontend/tests/acceptance/users-edit-credits-test.js similarity index 100% rename from tests/acceptance/users-edit-credits-test.js rename to frontend/tests/acceptance/users-edit-credits-test.js diff --git a/tests/acceptance/users-edit-responsibilities-test.js b/frontend/tests/acceptance/users-edit-responsibilities-test.js similarity index 100% rename from tests/acceptance/users-edit-responsibilities-test.js rename to frontend/tests/acceptance/users-edit-responsibilities-test.js diff --git a/tests/acceptance/users-edit-test.js b/frontend/tests/acceptance/users-edit-test.js similarity index 100% rename from tests/acceptance/users-edit-test.js rename to frontend/tests/acceptance/users-edit-test.js diff --git a/tests/acceptance/users-test.js b/frontend/tests/acceptance/users-test.js similarity index 100% rename from tests/acceptance/users-test.js rename to frontend/tests/acceptance/users-test.js diff --git a/tests/helpers/index.js b/frontend/tests/helpers/index.js similarity index 100% 
rename from tests/helpers/index.js rename to frontend/tests/helpers/index.js diff --git a/tests/helpers/responsive.js b/frontend/tests/helpers/responsive.js similarity index 100% rename from tests/helpers/responsive.js rename to frontend/tests/helpers/responsive.js diff --git a/tests/helpers/session-mock.js b/frontend/tests/helpers/session-mock.js similarity index 100% rename from tests/helpers/session-mock.js rename to frontend/tests/helpers/session-mock.js diff --git a/tests/helpers/task-select.js b/frontend/tests/helpers/task-select.js similarity index 100% rename from tests/helpers/task-select.js rename to frontend/tests/helpers/task-select.js diff --git a/tests/helpers/tracking-mock.js b/frontend/tests/helpers/tracking-mock.js similarity index 100% rename from tests/helpers/tracking-mock.js rename to frontend/tests/helpers/tracking-mock.js diff --git a/tests/helpers/user-select.js b/frontend/tests/helpers/user-select.js similarity index 100% rename from tests/helpers/user-select.js rename to frontend/tests/helpers/user-select.js diff --git a/tests/index.html b/frontend/tests/index.html similarity index 100% rename from tests/index.html rename to frontend/tests/index.html diff --git a/tests/integration/components/async-list/component-test.js b/frontend/tests/integration/components/async-list/component-test.js similarity index 100% rename from tests/integration/components/async-list/component-test.js rename to frontend/tests/integration/components/async-list/component-test.js diff --git a/tests/integration/components/attendance-slider/component-test.js b/frontend/tests/integration/components/attendance-slider/component-test.js similarity index 100% rename from tests/integration/components/attendance-slider/component-test.js rename to frontend/tests/integration/components/attendance-slider/component-test.js diff --git a/tests/integration/components/balance-donut/component-test.js b/frontend/tests/integration/components/balance-donut/component-test.js similarity 
index 100% rename from tests/integration/components/balance-donut/component-test.js rename to frontend/tests/integration/components/balance-donut/component-test.js diff --git a/tests/integration/components/changed-warning/component-test.js b/frontend/tests/integration/components/changed-warning/component-test.js similarity index 100% rename from tests/integration/components/changed-warning/component-test.js rename to frontend/tests/integration/components/changed-warning/component-test.js diff --git a/tests/integration/components/customer-visible-icon/component-test.js b/frontend/tests/integration/components/customer-visible-icon/component-test.js similarity index 100% rename from tests/integration/components/customer-visible-icon/component-test.js rename to frontend/tests/integration/components/customer-visible-icon/component-test.js diff --git a/tests/integration/components/date-buttons/component-test.js b/frontend/tests/integration/components/date-buttons/component-test.js similarity index 100% rename from tests/integration/components/date-buttons/component-test.js rename to frontend/tests/integration/components/date-buttons/component-test.js diff --git a/tests/integration/components/date-navigation/component-test.js b/frontend/tests/integration/components/date-navigation/component-test.js similarity index 100% rename from tests/integration/components/date-navigation/component-test.js rename to frontend/tests/integration/components/date-navigation/component-test.js diff --git a/tests/integration/components/duration-since/component-test.js b/frontend/tests/integration/components/duration-since/component-test.js similarity index 100% rename from tests/integration/components/duration-since/component-test.js rename to frontend/tests/integration/components/duration-since/component-test.js diff --git a/tests/integration/components/filter-sidebar/component-test.js b/frontend/tests/integration/components/filter-sidebar/component-test.js similarity index 100% rename from 
tests/integration/components/filter-sidebar/component-test.js rename to frontend/tests/integration/components/filter-sidebar/component-test.js diff --git a/tests/integration/components/filter-sidebar/filter/component-test.js b/frontend/tests/integration/components/filter-sidebar/filter/component-test.js similarity index 100% rename from tests/integration/components/filter-sidebar/filter/component-test.js rename to frontend/tests/integration/components/filter-sidebar/filter/component-test.js diff --git a/tests/integration/components/filter-sidebar/group/component-test.js b/frontend/tests/integration/components/filter-sidebar/group/component-test.js similarity index 100% rename from tests/integration/components/filter-sidebar/group/component-test.js rename to frontend/tests/integration/components/filter-sidebar/group/component-test.js diff --git a/tests/integration/components/filter-sidebar/label/component-test.js b/frontend/tests/integration/components/filter-sidebar/label/component-test.js similarity index 100% rename from tests/integration/components/filter-sidebar/label/component-test.js rename to frontend/tests/integration/components/filter-sidebar/label/component-test.js diff --git a/tests/integration/components/in-viewport/component-test.js b/frontend/tests/integration/components/in-viewport/component-test.js similarity index 100% rename from tests/integration/components/in-viewport/component-test.js rename to frontend/tests/integration/components/in-viewport/component-test.js diff --git a/tests/integration/components/loading-icon/component-test.js b/frontend/tests/integration/components/loading-icon/component-test.js similarity index 100% rename from tests/integration/components/loading-icon/component-test.js rename to frontend/tests/integration/components/loading-icon/component-test.js diff --git a/tests/integration/components/no-mobile-message/component-test.js b/frontend/tests/integration/components/no-mobile-message/component-test.js similarity index 100% 
rename from tests/integration/components/no-mobile-message/component-test.js rename to frontend/tests/integration/components/no-mobile-message/component-test.js diff --git a/tests/integration/components/no-permission/component-test.js b/frontend/tests/integration/components/no-permission/component-test.js similarity index 100% rename from tests/integration/components/no-permission/component-test.js rename to frontend/tests/integration/components/no-permission/component-test.js diff --git a/tests/integration/components/not-identical-warning/component-test.js b/frontend/tests/integration/components/not-identical-warning/component-test.js similarity index 100% rename from tests/integration/components/not-identical-warning/component-test.js rename to frontend/tests/integration/components/not-identical-warning/component-test.js diff --git a/tests/integration/components/optimized-power-select/component-test.js b/frontend/tests/integration/components/optimized-power-select/component-test.js similarity index 100% rename from tests/integration/components/optimized-power-select/component-test.js rename to frontend/tests/integration/components/optimized-power-select/component-test.js diff --git a/tests/integration/components/progress-tooltip/component-test.js b/frontend/tests/integration/components/progress-tooltip/component-test.js similarity index 100% rename from tests/integration/components/progress-tooltip/component-test.js rename to frontend/tests/integration/components/progress-tooltip/component-test.js diff --git a/tests/integration/components/record-button/component-test.js b/frontend/tests/integration/components/record-button/component-test.js similarity index 100% rename from tests/integration/components/record-button/component-test.js rename to frontend/tests/integration/components/record-button/component-test.js diff --git a/tests/integration/components/report-review-warning/component-test.js 
b/frontend/tests/integration/components/report-review-warning/component-test.js similarity index 100% rename from tests/integration/components/report-review-warning/component-test.js rename to frontend/tests/integration/components/report-review-warning/component-test.js diff --git a/tests/integration/components/report-row/component-test.js b/frontend/tests/integration/components/report-row/component-test.js similarity index 100% rename from tests/integration/components/report-row/component-test.js rename to frontend/tests/integration/components/report-row/component-test.js diff --git a/tests/integration/components/sort-header/component-test.js b/frontend/tests/integration/components/sort-header/component-test.js similarity index 100% rename from tests/integration/components/sort-header/component-test.js rename to frontend/tests/integration/components/sort-header/component-test.js diff --git a/tests/integration/components/statistic-list/bar/component-test.js b/frontend/tests/integration/components/statistic-list/bar/component-test.js similarity index 100% rename from tests/integration/components/statistic-list/bar/component-test.js rename to frontend/tests/integration/components/statistic-list/bar/component-test.js diff --git a/tests/integration/components/statistic-list/column/component-test.js b/frontend/tests/integration/components/statistic-list/column/component-test.js similarity index 100% rename from tests/integration/components/statistic-list/column/component-test.js rename to frontend/tests/integration/components/statistic-list/column/component-test.js diff --git a/tests/integration/components/statistic-list/component-test.js b/frontend/tests/integration/components/statistic-list/component-test.js similarity index 100% rename from tests/integration/components/statistic-list/component-test.js rename to frontend/tests/integration/components/statistic-list/component-test.js diff --git a/tests/integration/components/sy-calendar/component-test.js 
b/frontend/tests/integration/components/sy-calendar/component-test.js similarity index 100% rename from tests/integration/components/sy-calendar/component-test.js rename to frontend/tests/integration/components/sy-calendar/component-test.js diff --git a/tests/integration/components/sy-checkbox/component-test.js b/frontend/tests/integration/components/sy-checkbox/component-test.js similarity index 100% rename from tests/integration/components/sy-checkbox/component-test.js rename to frontend/tests/integration/components/sy-checkbox/component-test.js diff --git a/tests/integration/components/sy-checkmark/component-test.js b/frontend/tests/integration/components/sy-checkmark/component-test.js similarity index 100% rename from tests/integration/components/sy-checkmark/component-test.js rename to frontend/tests/integration/components/sy-checkmark/component-test.js diff --git a/tests/integration/components/sy-datepicker-btn/component-test.js b/frontend/tests/integration/components/sy-datepicker-btn/component-test.js similarity index 100% rename from tests/integration/components/sy-datepicker-btn/component-test.js rename to frontend/tests/integration/components/sy-datepicker-btn/component-test.js diff --git a/tests/integration/components/sy-datepicker/component-test.js b/frontend/tests/integration/components/sy-datepicker/component-test.js similarity index 100% rename from tests/integration/components/sy-datepicker/component-test.js rename to frontend/tests/integration/components/sy-datepicker/component-test.js diff --git a/tests/integration/components/sy-durationpicker-day/component-test.js b/frontend/tests/integration/components/sy-durationpicker-day/component-test.js similarity index 100% rename from tests/integration/components/sy-durationpicker-day/component-test.js rename to frontend/tests/integration/components/sy-durationpicker-day/component-test.js diff --git a/tests/integration/components/sy-durationpicker/component-test.js 
b/frontend/tests/integration/components/sy-durationpicker/component-test.js similarity index 100% rename from tests/integration/components/sy-durationpicker/component-test.js rename to frontend/tests/integration/components/sy-durationpicker/component-test.js diff --git a/tests/integration/components/sy-modal-target/component-test.js b/frontend/tests/integration/components/sy-modal-target/component-test.js similarity index 100% rename from tests/integration/components/sy-modal-target/component-test.js rename to frontend/tests/integration/components/sy-modal-target/component-test.js diff --git a/tests/integration/components/sy-modal/body/component-test.js b/frontend/tests/integration/components/sy-modal/body/component-test.js similarity index 100% rename from tests/integration/components/sy-modal/body/component-test.js rename to frontend/tests/integration/components/sy-modal/body/component-test.js diff --git a/tests/integration/components/sy-modal/component-test.js b/frontend/tests/integration/components/sy-modal/component-test.js similarity index 100% rename from tests/integration/components/sy-modal/component-test.js rename to frontend/tests/integration/components/sy-modal/component-test.js diff --git a/tests/integration/components/sy-modal/footer/component-test.js b/frontend/tests/integration/components/sy-modal/footer/component-test.js similarity index 100% rename from tests/integration/components/sy-modal/footer/component-test.js rename to frontend/tests/integration/components/sy-modal/footer/component-test.js diff --git a/tests/integration/components/sy-modal/header/component-test.js b/frontend/tests/integration/components/sy-modal/header/component-test.js similarity index 100% rename from tests/integration/components/sy-modal/header/component-test.js rename to frontend/tests/integration/components/sy-modal/header/component-test.js diff --git a/tests/integration/components/sy-modal/overlay/component-test.js 
b/frontend/tests/integration/components/sy-modal/overlay/component-test.js similarity index 100% rename from tests/integration/components/sy-modal/overlay/component-test.js rename to frontend/tests/integration/components/sy-modal/overlay/component-test.js diff --git a/tests/integration/components/sy-timepicker/component-test.js b/frontend/tests/integration/components/sy-timepicker/component-test.js similarity index 100% rename from tests/integration/components/sy-timepicker/component-test.js rename to frontend/tests/integration/components/sy-timepicker/component-test.js diff --git a/tests/integration/components/sy-toggle/component-test.js b/frontend/tests/integration/components/sy-toggle/component-test.js similarity index 100% rename from tests/integration/components/sy-toggle/component-test.js rename to frontend/tests/integration/components/sy-toggle/component-test.js diff --git a/tests/integration/components/sy-topnav/component-test.js b/frontend/tests/integration/components/sy-topnav/component-test.js similarity index 100% rename from tests/integration/components/sy-topnav/component-test.js rename to frontend/tests/integration/components/sy-topnav/component-test.js diff --git a/tests/integration/components/task-selection/component-test.js b/frontend/tests/integration/components/task-selection/component-test.js similarity index 100% rename from tests/integration/components/task-selection/component-test.js rename to frontend/tests/integration/components/task-selection/component-test.js diff --git a/tests/integration/components/timed-clock/component-test.js b/frontend/tests/integration/components/timed-clock/component-test.js similarity index 100% rename from tests/integration/components/timed-clock/component-test.js rename to frontend/tests/integration/components/timed-clock/component-test.js diff --git a/tests/integration/components/tracking-bar/component-test.js b/frontend/tests/integration/components/tracking-bar/component-test.js similarity index 100% rename 
from tests/integration/components/tracking-bar/component-test.js rename to frontend/tests/integration/components/tracking-bar/component-test.js diff --git a/tests/integration/components/user-selection/component-test.js b/frontend/tests/integration/components/user-selection/component-test.js similarity index 100% rename from tests/integration/components/user-selection/component-test.js rename to frontend/tests/integration/components/user-selection/component-test.js diff --git a/tests/integration/components/weekly-overview-benchmark/component-test.js b/frontend/tests/integration/components/weekly-overview-benchmark/component-test.js similarity index 100% rename from tests/integration/components/weekly-overview-benchmark/component-test.js rename to frontend/tests/integration/components/weekly-overview-benchmark/component-test.js diff --git a/tests/integration/components/weekly-overview-day/component-test.js b/frontend/tests/integration/components/weekly-overview-day/component-test.js similarity index 100% rename from tests/integration/components/weekly-overview-day/component-test.js rename to frontend/tests/integration/components/weekly-overview-day/component-test.js diff --git a/tests/integration/components/weekly-overview/component-test.js b/frontend/tests/integration/components/weekly-overview/component-test.js similarity index 100% rename from tests/integration/components/weekly-overview/component-test.js rename to frontend/tests/integration/components/weekly-overview/component-test.js diff --git a/tests/integration/components/welcome-modal/component-test.js b/frontend/tests/integration/components/welcome-modal/component-test.js similarity index 100% rename from tests/integration/components/welcome-modal/component-test.js rename to frontend/tests/integration/components/welcome-modal/component-test.js diff --git a/tests/integration/components/worktime-balance-chart/component-test.js b/frontend/tests/integration/components/worktime-balance-chart/component-test.js 
similarity index 100% rename from tests/integration/components/worktime-balance-chart/component-test.js rename to frontend/tests/integration/components/worktime-balance-chart/component-test.js diff --git a/tests/test-helper.js b/frontend/tests/test-helper.js similarity index 100% rename from tests/test-helper.js rename to frontend/tests/test-helper.js diff --git a/tests/unit/abilities/report-test.js b/frontend/tests/unit/abilities/report-test.js similarity index 100% rename from tests/unit/abilities/report-test.js rename to frontend/tests/unit/abilities/report-test.js diff --git a/tests/unit/analysis/edit/controller-test.js b/frontend/tests/unit/analysis/edit/controller-test.js similarity index 100% rename from tests/unit/analysis/edit/controller-test.js rename to frontend/tests/unit/analysis/edit/controller-test.js diff --git a/tests/unit/analysis/edit/route-test.js b/frontend/tests/unit/analysis/edit/route-test.js similarity index 100% rename from tests/unit/analysis/edit/route-test.js rename to frontend/tests/unit/analysis/edit/route-test.js diff --git a/tests/unit/analysis/index/controller-test.js b/frontend/tests/unit/analysis/index/controller-test.js similarity index 100% rename from tests/unit/analysis/index/controller-test.js rename to frontend/tests/unit/analysis/index/controller-test.js diff --git a/tests/unit/analysis/index/route-test.js b/frontend/tests/unit/analysis/index/route-test.js similarity index 100% rename from tests/unit/analysis/index/route-test.js rename to frontend/tests/unit/analysis/index/route-test.js diff --git a/tests/unit/analysis/route-test.js b/frontend/tests/unit/analysis/route-test.js similarity index 100% rename from tests/unit/analysis/route-test.js rename to frontend/tests/unit/analysis/route-test.js diff --git a/tests/unit/controllers/qpcontroller/controller-test.js b/frontend/tests/unit/controllers/qpcontroller/controller-test.js similarity index 100% rename from tests/unit/controllers/qpcontroller/controller-test.js rename 
to frontend/tests/unit/controllers/qpcontroller/controller-test.js diff --git a/tests/unit/helpers/balance-highlight-class-test.js b/frontend/tests/unit/helpers/balance-highlight-class-test.js similarity index 100% rename from tests/unit/helpers/balance-highlight-class-test.js rename to frontend/tests/unit/helpers/balance-highlight-class-test.js diff --git a/tests/unit/helpers/format-duration-test.js b/frontend/tests/unit/helpers/format-duration-test.js similarity index 100% rename from tests/unit/helpers/format-duration-test.js rename to frontend/tests/unit/helpers/format-duration-test.js diff --git a/tests/unit/helpers/humanize-duration-test.js b/frontend/tests/unit/helpers/humanize-duration-test.js similarity index 100% rename from tests/unit/helpers/humanize-duration-test.js rename to frontend/tests/unit/helpers/humanize-duration-test.js diff --git a/tests/unit/helpers/parse-django-duration-test.js b/frontend/tests/unit/helpers/parse-django-duration-test.js similarity index 100% rename from tests/unit/helpers/parse-django-duration-test.js rename to frontend/tests/unit/helpers/parse-django-duration-test.js diff --git a/tests/unit/index/activities/controller-test.js b/frontend/tests/unit/index/activities/controller-test.js similarity index 100% rename from tests/unit/index/activities/controller-test.js rename to frontend/tests/unit/index/activities/controller-test.js diff --git a/tests/unit/index/activities/edit/controller-test.js b/frontend/tests/unit/index/activities/edit/controller-test.js similarity index 100% rename from tests/unit/index/activities/edit/controller-test.js rename to frontend/tests/unit/index/activities/edit/controller-test.js diff --git a/tests/unit/index/activities/edit/route-test.js b/frontend/tests/unit/index/activities/edit/route-test.js similarity index 100% rename from tests/unit/index/activities/edit/route-test.js rename to frontend/tests/unit/index/activities/edit/route-test.js diff --git a/tests/unit/index/activities/route-test.js 
b/frontend/tests/unit/index/activities/route-test.js similarity index 100% rename from tests/unit/index/activities/route-test.js rename to frontend/tests/unit/index/activities/route-test.js diff --git a/tests/unit/index/attendances/controller-test.js b/frontend/tests/unit/index/attendances/controller-test.js similarity index 100% rename from tests/unit/index/attendances/controller-test.js rename to frontend/tests/unit/index/attendances/controller-test.js diff --git a/tests/unit/index/attendances/route-test.js b/frontend/tests/unit/index/attendances/route-test.js similarity index 100% rename from tests/unit/index/attendances/route-test.js rename to frontend/tests/unit/index/attendances/route-test.js diff --git a/tests/unit/index/controller-test.js b/frontend/tests/unit/index/controller-test.js similarity index 100% rename from tests/unit/index/controller-test.js rename to frontend/tests/unit/index/controller-test.js diff --git a/tests/unit/index/reports/controller-test.js b/frontend/tests/unit/index/reports/controller-test.js similarity index 100% rename from tests/unit/index/reports/controller-test.js rename to frontend/tests/unit/index/reports/controller-test.js diff --git a/tests/unit/index/reports/route-test.js b/frontend/tests/unit/index/reports/route-test.js similarity index 100% rename from tests/unit/index/reports/route-test.js rename to frontend/tests/unit/index/reports/route-test.js diff --git a/tests/unit/index/route-test.js b/frontend/tests/unit/index/route-test.js similarity index 100% rename from tests/unit/index/route-test.js rename to frontend/tests/unit/index/route-test.js diff --git a/tests/unit/login/route-test.js b/frontend/tests/unit/login/route-test.js similarity index 100% rename from tests/unit/login/route-test.js rename to frontend/tests/unit/login/route-test.js diff --git a/tests/unit/models/absence-balance-test.js b/frontend/tests/unit/models/absence-balance-test.js similarity index 100% rename from 
tests/unit/models/absence-balance-test.js rename to frontend/tests/unit/models/absence-balance-test.js diff --git a/tests/unit/models/activity-test.js b/frontend/tests/unit/models/activity-test.js similarity index 100% rename from tests/unit/models/activity-test.js rename to frontend/tests/unit/models/activity-test.js diff --git a/tests/unit/models/attendance-test.js b/frontend/tests/unit/models/attendance-test.js similarity index 100% rename from tests/unit/models/attendance-test.js rename to frontend/tests/unit/models/attendance-test.js diff --git a/tests/unit/models/billing-type-test.js b/frontend/tests/unit/models/billing-type-test.js similarity index 100% rename from tests/unit/models/billing-type-test.js rename to frontend/tests/unit/models/billing-type-test.js diff --git a/tests/unit/models/cost-center-test.js b/frontend/tests/unit/models/cost-center-test.js similarity index 100% rename from tests/unit/models/cost-center-test.js rename to frontend/tests/unit/models/cost-center-test.js diff --git a/tests/unit/models/customer-statistic-test.js b/frontend/tests/unit/models/customer-statistic-test.js similarity index 100% rename from tests/unit/models/customer-statistic-test.js rename to frontend/tests/unit/models/customer-statistic-test.js diff --git a/tests/unit/models/customer-test.js b/frontend/tests/unit/models/customer-test.js similarity index 100% rename from tests/unit/models/customer-test.js rename to frontend/tests/unit/models/customer-test.js diff --git a/tests/unit/models/employment-test.js b/frontend/tests/unit/models/employment-test.js similarity index 100% rename from tests/unit/models/employment-test.js rename to frontend/tests/unit/models/employment-test.js diff --git a/tests/unit/models/location-test.js b/frontend/tests/unit/models/location-test.js similarity index 100% rename from tests/unit/models/location-test.js rename to frontend/tests/unit/models/location-test.js diff --git a/tests/unit/models/month-statistic-test.js 
b/frontend/tests/unit/models/month-statistic-test.js similarity index 100% rename from tests/unit/models/month-statistic-test.js rename to frontend/tests/unit/models/month-statistic-test.js diff --git a/tests/unit/models/overtime-credit-test.js b/frontend/tests/unit/models/overtime-credit-test.js similarity index 100% rename from tests/unit/models/overtime-credit-test.js rename to frontend/tests/unit/models/overtime-credit-test.js diff --git a/tests/unit/models/project-statistic-test.js b/frontend/tests/unit/models/project-statistic-test.js similarity index 100% rename from tests/unit/models/project-statistic-test.js rename to frontend/tests/unit/models/project-statistic-test.js diff --git a/tests/unit/models/project-test.js b/frontend/tests/unit/models/project-test.js similarity index 100% rename from tests/unit/models/project-test.js rename to frontend/tests/unit/models/project-test.js diff --git a/tests/unit/models/public-holiday-test.js b/frontend/tests/unit/models/public-holiday-test.js similarity index 100% rename from tests/unit/models/public-holiday-test.js rename to frontend/tests/unit/models/public-holiday-test.js diff --git a/tests/unit/models/report-intersection-test.js b/frontend/tests/unit/models/report-intersection-test.js similarity index 100% rename from tests/unit/models/report-intersection-test.js rename to frontend/tests/unit/models/report-intersection-test.js diff --git a/tests/unit/models/report-test.js b/frontend/tests/unit/models/report-test.js similarity index 100% rename from tests/unit/models/report-test.js rename to frontend/tests/unit/models/report-test.js diff --git a/tests/unit/models/task-statistic-test.js b/frontend/tests/unit/models/task-statistic-test.js similarity index 100% rename from tests/unit/models/task-statistic-test.js rename to frontend/tests/unit/models/task-statistic-test.js diff --git a/tests/unit/models/task-test.js b/frontend/tests/unit/models/task-test.js similarity index 100% rename from 
tests/unit/models/task-test.js rename to frontend/tests/unit/models/task-test.js diff --git a/tests/unit/models/user-statistic-test.js b/frontend/tests/unit/models/user-statistic-test.js similarity index 100% rename from tests/unit/models/user-statistic-test.js rename to frontend/tests/unit/models/user-statistic-test.js diff --git a/tests/unit/models/user-test.js b/frontend/tests/unit/models/user-test.js similarity index 100% rename from tests/unit/models/user-test.js rename to frontend/tests/unit/models/user-test.js diff --git a/tests/unit/models/worktime-balance-test.js b/frontend/tests/unit/models/worktime-balance-test.js similarity index 100% rename from tests/unit/models/worktime-balance-test.js rename to frontend/tests/unit/models/worktime-balance-test.js diff --git a/tests/unit/models/year-statistic-test.js b/frontend/tests/unit/models/year-statistic-test.js similarity index 100% rename from tests/unit/models/year-statistic-test.js rename to frontend/tests/unit/models/year-statistic-test.js diff --git a/tests/unit/no-access/route-test.js b/frontend/tests/unit/no-access/route-test.js similarity index 100% rename from tests/unit/no-access/route-test.js rename to frontend/tests/unit/no-access/route-test.js diff --git a/tests/unit/notfound/route-test.js b/frontend/tests/unit/notfound/route-test.js similarity index 100% rename from tests/unit/notfound/route-test.js rename to frontend/tests/unit/notfound/route-test.js diff --git a/tests/unit/projects/controller-test.js b/frontend/tests/unit/projects/controller-test.js similarity index 100% rename from tests/unit/projects/controller-test.js rename to frontend/tests/unit/projects/controller-test.js diff --git a/tests/unit/projects/route-test.js b/frontend/tests/unit/projects/route-test.js similarity index 100% rename from tests/unit/projects/route-test.js rename to frontend/tests/unit/projects/route-test.js diff --git a/tests/unit/protected/controller-test.js b/frontend/tests/unit/protected/controller-test.js 
similarity index 100% rename from tests/unit/protected/controller-test.js rename to frontend/tests/unit/protected/controller-test.js diff --git a/tests/unit/protected/route-test.js b/frontend/tests/unit/protected/route-test.js similarity index 100% rename from tests/unit/protected/route-test.js rename to frontend/tests/unit/protected/route-test.js diff --git a/tests/unit/serializers/attendance-test.js b/frontend/tests/unit/serializers/attendance-test.js similarity index 100% rename from tests/unit/serializers/attendance-test.js rename to frontend/tests/unit/serializers/attendance-test.js diff --git a/tests/unit/serializers/employment-test.js b/frontend/tests/unit/serializers/employment-test.js similarity index 100% rename from tests/unit/serializers/employment-test.js rename to frontend/tests/unit/serializers/employment-test.js diff --git a/tests/unit/services/autostart-tour-test.js b/frontend/tests/unit/services/autostart-tour-test.js similarity index 100% rename from tests/unit/services/autostart-tour-test.js rename to frontend/tests/unit/services/autostart-tour-test.js diff --git a/tests/unit/services/fetch-test.js b/frontend/tests/unit/services/fetch-test.js similarity index 100% rename from tests/unit/services/fetch-test.js rename to frontend/tests/unit/services/fetch-test.js diff --git a/tests/unit/services/metadata-fetcher-test.js b/frontend/tests/unit/services/metadata-fetcher-test.js similarity index 100% rename from tests/unit/services/metadata-fetcher-test.js rename to frontend/tests/unit/services/metadata-fetcher-test.js diff --git a/tests/unit/services/rejected-reports-test.js b/frontend/tests/unit/services/rejected-reports-test.js similarity index 100% rename from tests/unit/services/rejected-reports-test.js rename to frontend/tests/unit/services/rejected-reports-test.js diff --git a/tests/unit/services/tracking-test.js b/frontend/tests/unit/services/tracking-test.js similarity index 100% rename from tests/unit/services/tracking-test.js rename to 
frontend/tests/unit/services/tracking-test.js diff --git a/tests/unit/services/unverified-reports-test.js b/frontend/tests/unit/services/unverified-reports-test.js similarity index 100% rename from tests/unit/services/unverified-reports-test.js rename to frontend/tests/unit/services/unverified-reports-test.js diff --git a/tests/unit/sso-login/route-test.js b/frontend/tests/unit/sso-login/route-test.js similarity index 100% rename from tests/unit/sso-login/route-test.js rename to frontend/tests/unit/sso-login/route-test.js diff --git a/tests/unit/statistics/controller-test.js b/frontend/tests/unit/statistics/controller-test.js similarity index 100% rename from tests/unit/statistics/controller-test.js rename to frontend/tests/unit/statistics/controller-test.js diff --git a/tests/unit/statistics/route-test.js b/frontend/tests/unit/statistics/route-test.js similarity index 100% rename from tests/unit/statistics/route-test.js rename to frontend/tests/unit/statistics/route-test.js diff --git a/tests/unit/transforms/django-date-test.js b/frontend/tests/unit/transforms/django-date-test.js similarity index 100% rename from tests/unit/transforms/django-date-test.js rename to frontend/tests/unit/transforms/django-date-test.js diff --git a/tests/unit/transforms/django-datetime-test.js b/frontend/tests/unit/transforms/django-datetime-test.js similarity index 100% rename from tests/unit/transforms/django-datetime-test.js rename to frontend/tests/unit/transforms/django-datetime-test.js diff --git a/tests/unit/transforms/django-duration-test.js b/frontend/tests/unit/transforms/django-duration-test.js similarity index 100% rename from tests/unit/transforms/django-duration-test.js rename to frontend/tests/unit/transforms/django-duration-test.js diff --git a/tests/unit/transforms/django-time-test.js b/frontend/tests/unit/transforms/django-time-test.js similarity index 100% rename from tests/unit/transforms/django-time-test.js rename to 
frontend/tests/unit/transforms/django-time-test.js diff --git a/tests/unit/transforms/django-workdays-test.js b/frontend/tests/unit/transforms/django-workdays-test.js similarity index 100% rename from tests/unit/transforms/django-workdays-test.js rename to frontend/tests/unit/transforms/django-workdays-test.js diff --git a/tests/unit/users/edit/controller-test.js b/frontend/tests/unit/users/edit/controller-test.js similarity index 100% rename from tests/unit/users/edit/controller-test.js rename to frontend/tests/unit/users/edit/controller-test.js diff --git a/tests/unit/users/edit/credits/absence-credits/edit/controller-test.js b/frontend/tests/unit/users/edit/credits/absence-credits/edit/controller-test.js similarity index 100% rename from tests/unit/users/edit/credits/absence-credits/edit/controller-test.js rename to frontend/tests/unit/users/edit/credits/absence-credits/edit/controller-test.js diff --git a/tests/unit/users/edit/credits/absence-credits/edit/route-test.js b/frontend/tests/unit/users/edit/credits/absence-credits/edit/route-test.js similarity index 100% rename from tests/unit/users/edit/credits/absence-credits/edit/route-test.js rename to frontend/tests/unit/users/edit/credits/absence-credits/edit/route-test.js diff --git a/tests/unit/users/edit/credits/absence-credits/new/route-test.js b/frontend/tests/unit/users/edit/credits/absence-credits/new/route-test.js similarity index 100% rename from tests/unit/users/edit/credits/absence-credits/new/route-test.js rename to frontend/tests/unit/users/edit/credits/absence-credits/new/route-test.js diff --git a/tests/unit/users/edit/credits/index/controller-test.js b/frontend/tests/unit/users/edit/credits/index/controller-test.js similarity index 100% rename from tests/unit/users/edit/credits/index/controller-test.js rename to frontend/tests/unit/users/edit/credits/index/controller-test.js diff --git a/tests/unit/users/edit/credits/index/route-test.js 
b/frontend/tests/unit/users/edit/credits/index/route-test.js similarity index 100% rename from tests/unit/users/edit/credits/index/route-test.js rename to frontend/tests/unit/users/edit/credits/index/route-test.js diff --git a/tests/unit/users/edit/credits/overtime-credits/edit/controller-test.js b/frontend/tests/unit/users/edit/credits/overtime-credits/edit/controller-test.js similarity index 100% rename from tests/unit/users/edit/credits/overtime-credits/edit/controller-test.js rename to frontend/tests/unit/users/edit/credits/overtime-credits/edit/controller-test.js diff --git a/tests/unit/users/edit/credits/overtime-credits/edit/route-test.js b/frontend/tests/unit/users/edit/credits/overtime-credits/edit/route-test.js similarity index 100% rename from tests/unit/users/edit/credits/overtime-credits/edit/route-test.js rename to frontend/tests/unit/users/edit/credits/overtime-credits/edit/route-test.js diff --git a/tests/unit/users/edit/credits/overtime-credits/new/route-test.js b/frontend/tests/unit/users/edit/credits/overtime-credits/new/route-test.js similarity index 100% rename from tests/unit/users/edit/credits/overtime-credits/new/route-test.js rename to frontend/tests/unit/users/edit/credits/overtime-credits/new/route-test.js diff --git a/tests/unit/users/edit/credits/route-test.js b/frontend/tests/unit/users/edit/credits/route-test.js similarity index 100% rename from tests/unit/users/edit/credits/route-test.js rename to frontend/tests/unit/users/edit/credits/route-test.js diff --git a/tests/unit/users/edit/index/controller-test.js b/frontend/tests/unit/users/edit/index/controller-test.js similarity index 100% rename from tests/unit/users/edit/index/controller-test.js rename to frontend/tests/unit/users/edit/index/controller-test.js diff --git a/tests/unit/users/edit/index/route-test.js b/frontend/tests/unit/users/edit/index/route-test.js similarity index 100% rename from tests/unit/users/edit/index/route-test.js rename to 
frontend/tests/unit/users/edit/index/route-test.js diff --git a/tests/unit/users/edit/responsibilities/controller-test.js b/frontend/tests/unit/users/edit/responsibilities/controller-test.js similarity index 100% rename from tests/unit/users/edit/responsibilities/controller-test.js rename to frontend/tests/unit/users/edit/responsibilities/controller-test.js diff --git a/tests/unit/users/edit/responsibilities/route-test.js b/frontend/tests/unit/users/edit/responsibilities/route-test.js similarity index 100% rename from tests/unit/users/edit/responsibilities/route-test.js rename to frontend/tests/unit/users/edit/responsibilities/route-test.js diff --git a/tests/unit/users/edit/route-test.js b/frontend/tests/unit/users/edit/route-test.js similarity index 100% rename from tests/unit/users/edit/route-test.js rename to frontend/tests/unit/users/edit/route-test.js diff --git a/tests/unit/users/index/controller-test.js b/frontend/tests/unit/users/index/controller-test.js similarity index 100% rename from tests/unit/users/index/controller-test.js rename to frontend/tests/unit/users/index/controller-test.js diff --git a/tests/unit/users/index/route-test.js b/frontend/tests/unit/users/index/route-test.js similarity index 100% rename from tests/unit/users/index/route-test.js rename to frontend/tests/unit/users/index/route-test.js diff --git a/tests/unit/users/route-test.js b/frontend/tests/unit/users/route-test.js similarity index 100% rename from tests/unit/users/route-test.js rename to frontend/tests/unit/users/route-test.js diff --git a/tests/unit/utils/format-duration-test.js b/frontend/tests/unit/utils/format-duration-test.js similarity index 100% rename from tests/unit/utils/format-duration-test.js rename to frontend/tests/unit/utils/format-duration-test.js diff --git a/tests/unit/utils/humanize-duration-test.js b/frontend/tests/unit/utils/humanize-duration-test.js similarity index 100% rename from tests/unit/utils/humanize-duration-test.js rename to 
frontend/tests/unit/utils/humanize-duration-test.js diff --git a/tests/unit/utils/parse-django-duration-test.js b/frontend/tests/unit/utils/parse-django-duration-test.js similarity index 100% rename from tests/unit/utils/parse-django-duration-test.js rename to frontend/tests/unit/utils/parse-django-duration-test.js diff --git a/tests/unit/utils/query-params-test.js b/frontend/tests/unit/utils/query-params-test.js similarity index 100% rename from tests/unit/utils/query-params-test.js rename to frontend/tests/unit/utils/query-params-test.js diff --git a/tests/unit/utils/url-test.js b/frontend/tests/unit/utils/url-test.js similarity index 100% rename from tests/unit/utils/url-test.js rename to frontend/tests/unit/utils/url-test.js diff --git a/tests/unit/validators/moment-test.js b/frontend/tests/unit/validators/moment-test.js similarity index 100% rename from tests/unit/validators/moment-test.js rename to frontend/tests/unit/validators/moment-test.js diff --git a/tests/unit/validators/null-or-not-blank-test.js b/frontend/tests/unit/validators/null-or-not-blank-test.js similarity index 100% rename from tests/unit/validators/null-or-not-blank-test.js rename to frontend/tests/unit/validators/null-or-not-blank-test.js diff --git a/frontend/vendor/.gitkeep b/frontend/vendor/.gitkeep new file mode 100644 index 000000000..e69de29bb