diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..21c27454042679e6b3c941a02ece9b8b253af969 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,17 @@ +.idea/ +*/__pycache__/ +.env +.venv +env/ +venv/ +ENV/ +README.md +.pytest_cache +.mypy_cache +htmlcov +app/tests +figures/ +oidc_dev_example +oidc_dev/ +traefik_dev +ceph diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000000000000000000000000000000000000..1e46891975afedab71ce333d8be84cb9692cf41f --- /dev/null +++ b/.flake8 @@ -0,0 +1,3 @@ +[flake8] +max-line-length = 120 +exclude = .git,__pycache__,__init__.py,.mypy_cache,.pytest_cache diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..1a91d71400bc0ab62c6332878eb62cbd63213d48 --- /dev/null +++ b/.gitignore @@ -0,0 +1,10 @@ +.idea/ +__pycache__/ +.env +.venv +env/ +venv/ +ENV/ +.coverage +oidc_dev/ +traefik diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 0000000000000000000000000000000000000000..113c8cacb82aee2bfb57eb3fb345179836437282 --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,143 @@ +image: python:3.10-slim + +variables: + PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip" + PYTHONPATH: "$CI_PROJECT_DIR" + OBJECT_GATEWAY_URI: "http://127.0.0.1:8000" + CEPH_ACCESS_KEY: "" + CEPH_SECRET_KEY: "" + OIDC_CLIENT_SECRET: "" + OIDC_CLIENT_ID: "" + OIDC_BASE_URI: "http://127.0.0.1:8000" + +cache: + paths: + - .cache/pip + - venv/ + +before_script: + - python --version # For debugging + - pip install virtualenv + - virtualenv venv + - source venv/bin/activate + - python -m pip install -r requirements.txt + - python -m pip install -r requirements-dev.txt + +stages: # List of stages for jobs, and their order of execution +# - build + - test +# - deploy + +#build-job: # This job runs in the build stage, which runs first. +# stage: build +# script: +# - echo "Compiling the code..." +# - echo "Compile complete." 
+ +integration-test-job: # Runs integration tests with the database + stage: test + variables: + DB_PASSWORD: "$TEST_DB_PASSWORD" + DB_USER: "test_api_user" + DB_DATABASE: "integration-test-db" + DB_HOST: "integration-test-db" + services: + - name: mysql:8 + alias: integration-test-db + variables: + MYSQL_RANDOM_ROOT_PASSWORD: "yes" + MYSQL_DATABASE: "$DB_DATABASE" + MYSQL_USER: "$DB_USER" + MYSQL_PASSWORD: "$DB_PASSWORD" + script: + - python app/check_database_connection.py + - alembic downgrade base + - alembic upgrade head + - pytest --junitxml=integration-report.xml --cov=app --cov-report=term-missing app/tests/crud + - mkdir coverage-integration + - mv .coverage coverage-integration + artifacts: + paths: + - $CI_PROJECT_DIR/coverage-integration/.coverage + reports: + junit: $CI_PROJECT_DIR/integration-report.xml + +e2e-test-job: # Runs e2e tests on the API endpoints + stage: test + variables: + DB_PASSWORD: "$TEST_DB_PASSWORD" + DB_USER: "test_api_user" + DB_DATABASE: "e2e-test-db" + DB_HOST: "e2e-test-db" + OIDC_CLIENT_SECRET: "$TEST_OIDC_CLIENT_SECRET" + OIDC_CLIENT_ID: "$TEST_OIDC_CLIENT_ID" + OIDC_BASE_URI: "http://mock-oidc-server" + CLIENTS_CONFIGURATION_INLINE: "$TEST_OIDC_CLIENT_CONFIG" + services: + - name: mysql:8 + alias: e2e-test-db + variables: + MYSQL_RANDOM_ROOT_PASSWORD: "yes" + MYSQL_DATABASE: "$DB_DATABASE" + MYSQL_USER: "$DB_USER" + MYSQL_PASSWORD: "$DB_PASSWORD" + - name: ghcr.io/soluto/oidc-server-mock:latest + alias: mock-oidc-server + variables: + ASPNETCORE_ENVIRONMENT: "Development" + script: + - python app/check_database_connection.py + - python app/check_oidc_connection.py + - alembic downgrade base + - alembic upgrade head + - pytest --junitxml=e2e-report.xml --cov=app --cov-report=term-missing app/tests/api + - mkdir coverage-e2e + - mv .coverage coverage-e2e + artifacts: + paths: + - $CI_PROJECT_DIR/coverage-e2e/.coverage + reports: + junit: $CI_PROJECT_DIR/e2e-report.xml + +unit-test-job: # Runs unit tests + stage: test + script: 
+ - pytest --junitxml=unit-report.xml --noconftest --cov=app --cov-report=term-missing app/tests/unit + - mkdir coverage-unit + - mv .coverage coverage-unit + artifacts: + paths: + - $CI_PROJECT_DIR/coverage-unit/.coverage + reports: + junit: $CI_PROJECT_DIR/unit-report.xml + +combine-test-coverage-job: # Combine coverage reports from different test jobs + stage: test + needs: + - job: "e2e-test-job" + artifacts: true + - job: "integration-test-job" + artifacts: true + - job: "unit-test-job" + artifacts: true + script: + - coverage combine coverage-e2e/.coverage coverage-integration/.coverage coverage-unit/.coverage + - coverage report + - coverage xml --data-file=$CI_PROJECT_DIR/.coverage -o coverage.xml + coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/' + artifacts: + reports: + coverage_report: + coverage_format: cobertura + path: $CI_PROJECT_DIR/coverage.xml + +lint-test-job: # Runs linters checks on code + stage: test + script: + - ./scripts/lint.sh + +#deploy-job: # This job runs in the deploy stage. +# stage: deploy # It only runs when *both* jobs in the test stage complete successfully. +# script: +# - echo "Deploying application..." +# - echo "Application successfully deployed." 
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6e0e5b0a031b90ec800490693c465de4f2b5ffe6 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,46 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.2.0 + hooks: + - id: end-of-file-fixer + - id: check-added-large-files + - id: check-toml + - id: check-docstring-first + - id: detect-private-key + - id: trailing-whitespace + - id: check-yaml + - id: debug-statements + - id: check-merge-conflict + - id: check-ast +- repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black + files: app + args: [--check] +- repo: https://github.com/PyCQA/flake8 + rev: 4.0.1 + hooks: + - id: flake8 + files: app + args: [--config=.flake8] +- repo: https://github.com/pre-commit/mirrors-mypy + rev: v0.960 + hooks: + - id: mypy + files: app + args: [--config=pyproject.toml] + additional_dependencies: + - sqlalchemy2-stubs + - boto3-stubs-lite[s3] + - sqlalchemy<2.0.0 + - pydantic + - types-requests +- repo: https://github.com/PyCQA/isort + rev: 5.10.1 + hooks: + - id: isort + files: app + args: [-c] diff --git a/DEVELOPING.md b/DEVELOPING.md new file mode 100644 index 0000000000000000000000000000000000000000..18bfe6785f651baa6555b95197d2c22aeb4c4f5a --- /dev/null +++ b/DEVELOPING.md @@ -0,0 +1,153 @@ +## Development Setup + +### Python Setup ðŸ +Currently, only Python version `>=3.10` is supported because it uses its new features for type annotations +to write more compact code. Since FastAPI relies on these type annotations and `mypy` is integrated into +this project, we make heavy use of this feature. 
+ +Write +```python +var1: list[int] = [1,2,3] +var2: str | None = None +``` +instead of +```python +from typing import List, Union + +var1: List[int] = [1,2,3] +var2: Union[str, None] = None +``` +### Environment Setup +Create a virtual environment, install the dependencies and install the [pre-commit](https://pre-commit.com/) hooks.<br> +The linters can prevent a commit of the code quality doesn't meet the standard. +```shell +python -m venv venv +source venv/bin/activate +python -m pip install -r requirements.txt +python -m pip install -r requirements-dev.txt +pre-commit install +``` + +### Ceph Setup +For how to set up a ceph cluster or how to connect to an existing one see +the [documentation in the ceph folder](ceph/README.md). + +A user with `user` capabilities should be created, e.g.<br> +`radosgw-admin user create --uid=myadmin --caps="users=*"` + +### Database Setup +#### Dev database +The easiest solution is [Docker](https://docs.docker.com/get-docker/) with an attached volume +to set up a MySQL database. +```shell +docker volume create proxyapi_dev_db +docker run --name proxyapi_devdb \ + -e MYSQL_RANDOM_ROOT_PASSWORD=yes \ + -e MYSQL_DATABASE=<database_name> \ + -e MYSQL_USER=<database_user> \ + -e MYSQL_PASSWORD=<database_password> \ + -p 127.0.0.1:3306:3306 \ + -v proxyapi_dev_db:/var/lib/mysql \ + -d \ + mysql:8 +``` +When the container stopped just restart it with +```shell +docker start proxyapi_devdb +``` +Look at the [Environment Variables](#environment-variables) section to see which env variables have to be set. +#### Test database +Set up a second database on a different port for the integration tests. This database doesn't have to be persistent +because all data will be purged after each test run. 
+```shell +docker run --name proxyapi_testdb \ + -e MYSQL_RANDOM_ROOT_PASSWORD=yes \ + -e MYSQL_DATABASE=<database_name> \ + -e MYSQL_USER=<database_user> \ + -e MYSQL_PASSWORD=<database_password> \ + -p 127.0.0.1:8001:3306 \ + -d \ + mysql:8 +``` + +### Dev OIDC Provider Setup +To avoid the complex process of connecting the local machine with the LifeScience AAI Test server, a simple [OIDC provider](https://github.com/Soluto/oidc-server-mock) +can be setup with Docker.<br> +Copy the `oidc_dev_example` directory to `oidc_dev` +```shell +cp -r oidc_dev_example oidc_dev +``` +In the file `oidc_dev/clients_config.json` add a random value to `ClientId` and `ClientSecrets`. These can be generated for example with `openssl`. +```shell +openssl rand -hex 10 +``` +You can add/delete users in the file `oidc_dev/users_config.json` according the schema that is provided there.<br> +Adjust the volume path and start the docker container +```shell +docker run --name proxyapi_oidc_provider \ + -e CLIENTS_CONFIGURATION_PATH=/tmp/config/clients_config.json \ + -e IDENTITY_RESOURCES_PATH=/tmp/config/identity_resources.json \ + -e USERS_CONFIGURATION_PATH=/tmp/config/users_config.json \ + -e SERVER_OPTIONS_PATH=/tmp/config/server_options.json \ + -e ASPNETCORE_ENVIRONMENT=Development \ + -p 127.0.0.1:8002:80 \ + -v /path/to/folder/oidc_dev:/tmp/config:ro \ + -d \ + ghcr.io/soluto/oidc-server-mock:latest +``` +Set the env variables `OIDC_BASE_URI` to `http://localhost:8002` and `OIDC_CLIENT_SECRET` / `OIDC_CLIENT_ID` to their appropriate value. + +### Reverse Proxy Setup +The `API_PREFIX` is handles on the level of the reverse proxy. This simplifies the routing in the code and the cooperation with the [Frontend](https://gitlab.ub.uni-bielefeld.de/denbi/object-storage-access-ui). +An simple Traefik reverse proxy configuration is stored in the repository. + +[Traefik](https://traefik.io/) is a reverse Proxy written in Go. 
+To use it, download the [`traefik`](https://github.com/traefik/traefik/releases) binary and start it with +```shell +cd traefik_dev +/path/to/binary/traefik --configFile=traefik.toml +``` +The provided configuration does the following things + * It forwards all request to http://localhost:9999/api/* to http://localhost:8080 (this backend) + * It strips the prefix `/api` before it forwards the request to the backend + * All other request will be forwarded to http://localhost:5173, the corresponding dev [Frontend](https://gitlab.ub.uni-bielefeld.de/denbi/object-storage-access-ui) + +You don't have to use Traefik for that. You can use any reverse proxy for this task, like [Caddy](https://caddyserver.com/), [HAProxy](https://www.haproxy.org/) or [nginx](https://nginx.org/en/).<br> + +### Run Dev Server +Export all necessary environment variables or create a `.env` file.<br> +Run the dev server with live reload after changes +```shell +python app/check_ceph_connection.py && \ + python app/check_oidc_connection.py && \ + python app/check_database_connection.py && \ + alembic upgrade head && \ + uvicorn app.main:app --reload +``` +You can check your code with linters or even automatically reformat files based on these rules +```shell +./scripts/lint.sh # check code +./scripts/format.sh # reformat code +``` + +### Run Tests +Export the port and other variables of the database and then start the test script +```shell +export DB_PORT=8001 +./tests-start.sh +``` + +### Common Problems +Q: When I start the server I get the error `ModuleNotFoundError: No module named 'app'`<br> +A: export the `PYTHONPATH` variable with the current working directory +```shell +export PYTHONPATH=$(pwd) +``` + +Q: When I start the linters `isort`, `black`, etc. cannot be found<br> +A: Prepend every call with `python -m` +```shell +python -m isort +python -m black +... 
+``` diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..e4434f1cfbdbb6027d619b745932f519d8220583 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,18 @@ +FROM python:3.10-slim +WORKDIR /code +ENV PYTHONPATH=/code +EXPOSE 80 + +# dumb-init forwards the kill signal to the python process +RUN apt-get update && apt-get -y install dumb-init curl +ENTRYPOINT ["/usr/bin/dumb-init", "--"] + +HEALTHCHECK --interval=35s --timeout=4s CMD curl -f http://localhost/health || exit 1 + +COPY requirements.txt ./requirements.txt + +RUN pip install --no-cache-dir --upgrade -r requirements.txt + +COPY . . + +CMD ["./start_service.sh"] diff --git a/README.md b/README.md index 8608b6c5681e73fffa001a2bca975af6aca76533..2b338cbd4ce106f16b42dd2dd6b8dcad20fe1e8d 100644 --- a/README.md +++ b/README.md @@ -1,92 +1,49 @@ -# Object Storage Access - - - -## Getting started - -To make it easy for you to get started with GitLab, here's a list of recommended next steps. - -Already a pro? Just edit this README.md and make it your own. Want to make it easy? [Use the template at the bottom](#editing-this-readme)! 
- -## Add your files - -- [ ] [Create](https://docs.gitlab.com/ee/user/project/repository/web_editor.html#create-a-file) or [upload](https://docs.gitlab.com/ee/user/project/repository/web_editor.html#upload-a-file) files -- [ ] [Add files using the command line](https://docs.gitlab.com/ee/gitlab-basics/add-file.html#add-a-file-using-the-command-line) or push an existing Git repository with the following command: - -``` -cd existing_repo -git remote add origin https://gitlab.ub.uni-bielefeld.de/denbi/object-storage-access.git -git branch -M main -git push -uf origin main -``` - -## Integrate with your tools - -- [ ] [Set up project integrations](https://gitlab.ub.uni-bielefeld.de/denbi/object-storage-access/-/settings/integrations) - -## Collaborate with your team - -- [ ] [Invite team members and collaborators](https://docs.gitlab.com/ee/user/project/members/) -- [ ] [Create a new merge request](https://docs.gitlab.com/ee/user/project/merge_requests/creating_merge_requests.html) -- [ ] [Automatically close issues from merge requests](https://docs.gitlab.com/ee/user/project/issues/managing_issues.html#closing-issues-automatically) -- [ ] [Enable merge request approvals](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/) -- [ ] [Automatically merge when pipeline succeeds](https://docs.gitlab.com/ee/user/project/merge_requests/merge_when_pipeline_succeeds.html) - -## Test and Deploy - -Use the built-in continuous integration in GitLab. 
- -- [ ] [Get started with GitLab CI/CD](https://docs.gitlab.com/ee/ci/quick_start/index.html) -- [ ] [Analyze your code for known vulnerabilities with Static Application Security Testing(SAST)](https://docs.gitlab.com/ee/user/application_security/sast/) -- [ ] [Deploy to Kubernetes, Amazon EC2, or Amazon ECS using Auto Deploy](https://docs.gitlab.com/ee/topics/autodevops/requirements.html) -- [ ] [Use pull-based deployments for improved Kubernetes management](https://docs.gitlab.com/ee/user/clusters/agent/) -- [ ] [Set up protected environments](https://docs.gitlab.com/ee/ci/environments/protected_environments.html) - -*** - -# Editing this README - -When you're ready to make this README your own, just edit this file and use the handy template below (or feel free to structure it however you want - this is just a starting point!). Thank you to [makeareadme.com](https://www.makeareadme.com/) for this template. - -## Suggestions for a good README -Every project is different, so consider which of these sections apply to yours. The sections used in the template are suggestions for most open source projects. Also keep in mind that while a README can be too long and detailed, too long is better than too short. If you think your README is too long, consider utilizing another form of documentation rather than cutting out information. - -## Name -Choose a self-explaining name for your project. +# S3 Proxy API ## Description -Let people know what your project can do specifically. Provide context and add a link to any reference visitors might be unfamiliar with. A list of Features or a Background subsection can also be added here. If there are alternatives to your project, this is a good place to list differentiating factors. - -## Badges -On some READMEs, you may see small images that convey metadata, such as whether or not all the tests are passing for the project. You can use Shields to add some to your README. Many services also have instructions for adding a badge. 
- -## Visuals -Depending on what you are making, it can be a good idea to include screenshots or even a video (you'll frequently see GIFs rather than actual videos). Tools like ttygif can help, but check out Asciinema for a more sophisticated method. - -## Installation -Within a particular ecosystem, there may be a common way of installing things, such as using Yarn, NuGet, or Homebrew. However, consider the possibility that whoever is reading your README is a novice and would like more guidance. Listing specific steps helps remove ambiguity and gets people to using your project as quickly as possible. If it only runs in a specific context like a particular programming language version or operating system or has dependencies that have to be installed manually, also add a Requirements subsection. - -## Usage -Use examples liberally, and show the expected output if you can. It's helpful to have inline the smallest example of usage that you can demonstrate, while providing links to more sophisticated examples if they are too long to reasonably include in the README. - -## Support -Tell people where they can go to for help. It can be any combination of an issue tracker, a chat room, an email address, etc. - -## Roadmap -If you have ideas for releases in the future, it is a good idea to list them in the README. - -## Contributing -State if you are open to contributions and what your requirements are for accepting them. - -For people who want to make changes to your project, it's helpful to have some documentation on how to get started. Perhaps there is a script that they should run or some environment variables that they need to set. Make these steps explicit. These instructions could also be useful to your future self. - -You can also document commands to lint the code or run tests. These steps help to ensure high code quality and reduce the likelihood that the changes inadvertently break something. 
Having instructions for running tests is especially helpful if it requires external setup, such as starting a Selenium server for testing in a browser. - -## Authors and acknowledgment -Show your appreciation to those who have contributed to the project. - -## License -For open source projects, say how it is licensed. - -## Project status -If you have run out of energy or time for your project, put a note at the top of the README saying that development has slowed down or stopped completely. Someone may choose to fork your project or volunteer to step in as a maintainer or owner, allowing your project to keep going. You can also make an explicit request for maintainers. +Openstack is shipping with an integrated UI to access the Object Store provided by ceph. Unfortunately, this UI does not allow +fine-grained control who can access a bucket or object. You can either make it accessible for everyone or nobody, but +Ceph can do this and much more. 👎 +This is the backend for a new UI which can leverage the additional powerful functionality provided by Ceph in a +user-friendly manner. 
👠+ +| Feature | Openstack Integration | New UI | +|-----------------------------|:---------------------:|:------:| +| Create / Delete Buckets UI | ✅ | ✅ | +| Create / Delete Buckets CLI | ✅ | ⌠| +| Upload / Download Objects | ✅ | ✅ | +| Fine-grained Access Control | ⌠| ✅ | + +### Concept + + +## Environment Variables + +### Mandatory / Recommended Variables + +| Variable | Default | Value | Description | +|----------------------|---------|-----------------------|---------------------------------------| +| `SECRET_KEY` | random | \<random key> | Secret key to sign JWT | +| `DB_HOST` | unset | <db hostname / IP> | IP or Hostname Adress of DB | +| `DB_PORT` | 3306 | Number | Port of the database | +| `DB_USER` | unset | \<db username> | Username of the database user | +| `DB_PASSWORD` | unset | \<db password> | Password of the database user | +| `DB_DATABASE` | unset | \<db name> | Name of the database | +| `OBJECT_GATEWAY_URI` | unset | HTTP URL | HTTP URL of the Ceph Object Gateway | +| `CEPH_ACCESS_KEY` | unset | \<access key> | Ceph access key with admin privileges | +| `CEPH_SECRET_KEY` | unset | \<secret key> | Ceph secret key with admin privileges | +| `OIDC_CLIENT_ID` | unset | \<OIDC client id> | Client ID from the OIDC provider | +| `OIDC_CLIENT_SECRET` | unset | \<OIDC client secret> | Client Secret from the OIDC provider | +| `OIDC_BASE_URI` | unset | HTTP URL | HTTP URL of the OIDC Provider | + +### Optional Variables + +| Variable | Default | Value | Description | +|-----------------------------|-------------------------------------|-----------------------------|---------------------------------------------------------------------------------------| +| `DOMAIN` | `localhost` | string | Domain under which the service will be hosted. 
| +| `SSL_TERMINATION` | `false` | `<"true"|"false">` | Flag if the service runs behind a SSL termination proxy | +| `API_PREFIX` | `/api` | URL path | Prefix before every URL path | +| `JWT_TOKEN_EXPIRE_MINUTES` | 8 days | number | Minutes till a JWT expires | +| `BACKEND_CORS_ORIGINS` | `[]` | json formatted list of urls | List of valid CORS origins | +| `SQLALCHEMY_VERBOSE_LOGGER` | `false` | `<"true"|"false">` | Enables verbose SQL output.<br>Should be `false` in production | +| `OIDC_META_INFO_PATH` | `/.well-known/openid-configuration` | URL path | Path to the OIDC configuration file<br> Will be concatenated with the `OIDC_BASE_URI` | diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000000000000000000000000000000000000..4140727b52ff0b493b64ebb929d898d41ae4fa86 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,100 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = alembic + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. 
When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/README b/alembic/README new file mode 100644 index 0000000000000000000000000000000000000000..a23d4fb519d3329160c17e5573c1382ef8337e6b --- /dev/null +++ b/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration with an async dbapi. diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000000000000000000000000000000000000..6d91f4d45408643589b4df5bb07370423bc579a1 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,93 @@ +import asyncio +from logging.config import fileConfig + +from sqlalchemy import engine_from_config, pool +from sqlalchemy.ext.asyncio import AsyncEngine + +from alembic import context +from app.core.config import settings +from app.db.base import Base + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. 
+if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +target_metadata = Base.metadata + + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def get_url() -> str: + return str(settings.SQLALCHEMY_DATABASE_ASYNC_URI) + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = get_url() + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection): + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +async def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + url = get_url() + connectable = AsyncEngine( + engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + future=True, + url=url, + ) + ) + + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + await connectable.dispose() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + asyncio.run(run_migrations_online()) diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000000000000000000000000000000000000..2c0156303a8df3ffdc9de87765bf801bf6bea4a5 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/5521b5759004_create_user_and_bucket_table.py b/alembic/versions/5521b5759004_create_user_and_bucket_table.py new file mode 100644 index 0000000000000000000000000000000000000000..5fa91f9575bffe0df65087cd6d5d8406024a9db0 --- /dev/null +++ b/alembic/versions/5521b5759004_create_user_and_bucket_table.py @@ -0,0 +1,51 @@ +"""Create user and bucket table + +Revision ID: 5521b5759004 +Revises: +Create Date: 2022-05-03 14:01:22.154984 + +""" +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "5521b5759004" +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "user", + sa.Column("uid", sa.String(length=64), nullable=False), + sa.Column("name", sa.String(length=256), nullable=False), + sa.PrimaryKeyConstraint("uid"), + ) + op.create_index(op.f("ix_user_uid"), "user", ["uid"], unique=True) + op.create_table( + "bucket", + sa.Column("name", sa.String(length=63), nullable=False), + sa.Column("description", mysql.TEXT(), nullable=False), + sa.Column("public", sa.Boolean(), server_default="0", nullable=True), + sa.Column("owner_id", sa.String(length=64), nullable=True), + sa.ForeignKeyConstraint( + ["owner_id"], + ["user.uid"], + ), + sa.PrimaryKeyConstraint("name"), + ) + op.create_index(op.f("ix_bucket_name"), "bucket", ["name"], unique=True) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f("ix_bucket_name"), table_name="bucket") + op.drop_table("bucket") + op.drop_index(op.f("ix_user_uid"), table_name="user") + op.drop_table("user") + # ### end Alembic commands ### diff --git a/alembic/versions/83a3a47a6351_add_username_and_display_name_and_drop_.py b/alembic/versions/83a3a47a6351_add_username_and_display_name_and_drop_.py new file mode 100644 index 0000000000000000000000000000000000000000..0e0064f62d1f7f68405f992b3022f8248c0dcc67 --- /dev/null +++ b/alembic/versions/83a3a47a6351_add_username_and_display_name_and_drop_.py @@ -0,0 +1,35 @@ +"""Add username and display_name and drop name for user table + +Revision ID: 83a3a47a6351 +Revises: cafa1e01b782 +Create Date: 2022-05-04 13:22:46.317796 + +""" +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "83a3a47a6351" +down_revision = "cafa1e01b782" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column("user", sa.Column("display_name", sa.String(length=256), nullable=True)) + op.add_column("user", sa.Column("username", sa.String(length=256), nullable=False)) + op.drop_column("user", "name") + op.create_index(op.f("ix_user_username"), "user", ["username"], unique=True) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column("user", sa.Column("name", mysql.VARCHAR(length=256), nullable=False)) + op.drop_index(op.f("ix_user_username"), table_name="user") + op.drop_column("user", "username") + op.drop_column("user", "display_name") + # ### end Alembic commands ### diff --git a/alembic/versions/9fa582febebe_delete_username_from_user.py b/alembic/versions/9fa582febebe_delete_username_from_user.py new file mode 100644 index 0000000000000000000000000000000000000000..2b5eaa9c89ddcf5b05296953b7be26e37ec74598 --- /dev/null +++ b/alembic/versions/9fa582febebe_delete_username_from_user.py @@ -0,0 +1,30 @@ +"""Delete username from user + +Revision ID: 9fa582febebe +Revises: 83a3a47a6351 +Create Date: 2022-07-27 11:10:53.440935 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = "9fa582febebe" +down_revision = "83a3a47a6351" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index("ix_user_username", table_name="user") + op.drop_column("user", "username") + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
def upgrade():
    """Create the ``bucketpermission`` table and tighten ``bucket.public``.

    A permission row is keyed by the (user, bucket) pair; rows disappear
    automatically when the referenced user or bucket is deleted.
    """
    permission_columns = [
        sa.Column("user_id", sa.String(length=64), nullable=False),
        sa.Column("bucket_name", sa.String(length=63), nullable=False),
        # Optional validity window for the grant.
        sa.Column("from", mysql.TIMESTAMP(), nullable=True),
        sa.Column("to", mysql.TIMESTAMP(), nullable=True),
        # Optional restriction of the grant to keys under this prefix.
        sa.Column("file_prefix", sa.String(length=512), nullable=True),
        sa.Column("permissions", mysql.ENUM("READ", "WRITE", "READWRITE"), nullable=False),
        sa.ForeignKeyConstraint(["bucket_name"], ["bucket.name"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["user_id"], ["user.uid"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("user_id", "bucket_name"),
    ]
    op.create_table("bucketpermission", *permission_columns)
    # ``public`` becomes mandatory now that every bucket carries the flag.
    op.alter_column(
        "bucket",
        "public",
        existing_type=mysql.TINYINT(display_width=1),
        nullable=False,
        existing_server_default=sa.text("'0'"),
    )
    # ### end Alembic commands ###
# Shared OpenAPI response documentation for every route that sits behind the
# bearer-token dependency (see the include_router calls below).
alternative_responses: dict[int | str, dict[str, Any]] = {
    status.HTTP_400_BAD_REQUEST: {
        "model": ErrorDetail,
        "description": "Error decoding JWT Token",
        "content": {"application/json": {"example": {"detail": "Malformed JWT Token"}}},
    },
    status.HTTP_403_FORBIDDEN: {
        "model": ErrorDetail,
        "description": "Not authenticated",
        "content": {"application/json": {"example": {"detail": "Not authenticated"}}},
    },
    status.HTTP_404_NOT_FOUND: {
        "model": ErrorDetail,
        "description": "Entity not Found",
        "content": {"application/json": {"example": {"detail": "Entity not found."}}},
    },
}

api_router = APIRouter()
# The login routes are public; every other router requires a decoded JWT.
api_router.include_router(login.router)
for protected_router in (buckets.router, users.router, bucket_permissions.router):
    api_router.include_router(
        protected_router,
        dependencies=[Depends(decode_bearer_token)],
        responses=alternative_responses,
    )
async def get_current_user(token: JWTToken = Depends(decode_bearer_token), db: AsyncSession = Depends(get_db)) -> User:
    """
    Resolve the user record referenced by the JWT's ``sub`` claim.

    FastAPI Dependency Injection Function.

    Parameters
    ----------
    token : app.schemas.security.JWTToken
        The verified and decoded JWT. Dependency Injection.
    db : sqlalchemy.ext.asyncio.AsyncSession.
        Async database session to perform query on. Dependency Injection.

    Returns
    -------
    user : app.models.user.User
        User associated with the JWT sent with the HTTP request.
    """
    db_user = await CRUDUser.get(db, token.sub)
    if db_user is None:
        # Token is valid but no matching account exists in our database.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
    return db_user
async def get_current_bucket(
    bucket_name: str = Path(..., description="Name of bucket", example="test-bucket", max_length=63, min_length=3),
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
) -> Bucket:
    """
    Fetch the bucket named in the URL path, enforcing read access.

    A request is rejected unless the bucket is public or the current user
    holds a permission on it.

    FastAPI Dependency Injection Function.

    Parameters
    ----------
    bucket_name : str
        Name of a bucket. URL Path Parameter.
    db : sqlalchemy.ext.asyncio.AsyncSession.
        Async database session to perform query on. Dependency Injection.
    current_user : app.models.user.User
        User associated with the JWT sent with the HTTP request. Dependency Injection.

    Returns
    -------
    bucket : app.models.bucket.Bucket
        Bucket with the given name.
    """
    bucket = await CRUDBucket.get(db, bucket_name)
    if bucket is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Bucket not found")
    # Public buckets are readable by everyone; otherwise a permission row
    # for this user must exist.
    if bucket.public:
        return bucket
    if await CRUDBucketPermission.check_permission(db, bucket_name, current_user.uid):
        return bucket
    raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="No rights for this bucket")
@router.get(
    "/bucket/{bucket_name}/user/{uid}",
    response_model=PermissionSchema,
    summary="Get permission for bucket and user combination.",
    response_model_exclude_none=True,
)
async def get_permission_for_bucket(
    bucket: BucketDB = Depends(get_current_bucket),
    db: AsyncSession = Depends(get_db),
    user: UserDB = Depends(get_authorized_user_for_permission),
) -> PermissionSchema:
    """
    Get the bucket permissions for the specific combination of bucket and user.\n
    The owner of the bucket and the grantee of the permission can view it.
    \f
    Parameters
    ----------
    bucket : app.models.bucket.Bucket
        Bucket with the name provided in the URL path. Dependency Injection.
    db : sqlalchemy.ext.asyncio.AsyncSession.
        Async database session to perform query on. Dependency Injection.
    user : app.models.user.User
        User with the uid in the URL. Dependency Injection.

    Returns
    -------
    permissions : app.schemas.bucket_permission.BucketPermission
        Permission for this bucket and user combination.
    """
    db_permission = await CRUDBucketPermission.get(db, bucket.name, user.uid)
    if db_permission is None:
        raise HTTPException(
            status.HTTP_404_NOT_FOUND,
            detail=f"Permission for combination of bucket={bucket.name} and user={user.uid} doesn't exists",
        )
    return PermissionSchema.from_db_model(db_permission, uid=user.uid)
@router.get(
    "/bucket/{bucket_name}",
    response_model=list[PermissionSchema],
    summary="Get all permissions for a bucket.",
    response_model_exclude_none=True,
)
async def list_permissions_per_bucket(
    bucket: BucketDB = Depends(get_current_bucket),
    db: AsyncSession = Depends(get_db),
    current_user: UserDB = Depends(get_current_user),
) -> list[PermissionSchema]:
    """
    List all the bucket permissions for the given bucket.
    \f
    Parameters
    ----------
    bucket : app.models.bucket.Bucket
        Bucket with the name provided in the URL path. Dependency Injection.
    db : sqlalchemy.ext.asyncio.AsyncSession.
        Async database session to perform query on. Dependency Injection.
    current_user : app.models.user.User
        Current user. Dependency Injection.

    Returns
    -------
    permissions : list[app.schemas.bucket_permission.BucketPermission]
        List of all permissions for this bucket.
    """
    # Only the bucket owner may enumerate the grants on the bucket.
    is_owner = await CRUDBucketPermission.check_permission(db, bucket.name, current_user.uid, only_own=True)
    if not is_owner:
        raise HTTPException(status.HTTP_403_FORBIDDEN, "You can only view your own bucket permissions")
    db_permissions = await CRUDBucketPermission.get_permissions_for_bucket(db, bucket.name)
    return [PermissionSchema.from_db_model(db_permission) for db_permission in db_permissions]
@router.put(
    "/bucket/{bucket_name}/user/{uid}",
    status_code=status.HTTP_200_OK,
    response_model=PermissionSchema,
    summary="Update a bucket permission",
    response_model_exclude_none=True,
)
async def update_permission(
    permission_parameters: PermissionParametersSchema = Body(..., description="Permission to create"),
    user: UserDB = Depends(get_authorized_user_for_permission),
    bucket: BucketDB = Depends(get_current_bucket),
    db: AsyncSession = Depends(get_db),
    current_user: UserDB = Depends(get_current_user),
    s3: S3ServiceResource = Depends(get_s3_resource),
) -> PermissionSchema:
    """
    Update a permission for a bucket and user.
    \f
    Parameters
    ----------
    permission_parameters : app.schemas.bucket_permission.BucketPermissionParameters
        Information about the permission which should be updated. HTTP Body parameter.
    user : app.models.user.User
        User with the uid in the URL. Dependency Injection.
    bucket : app.models.bucket.Bucket
        Bucket with the name provided in the URL path. Dependency Injection.
    db : sqlalchemy.ext.asyncio.AsyncSession.
        Async database session to perform query on. Dependency Injection.
    current_user : app.models.user.User
        Current user. Dependency Injection.
    s3 : boto3_type_annotations.s3.ServiceResource
        S3 Service to perform operations on buckets in Ceph. Dependency Injection.

    Returns
    -------
    permissions : app.schemas.bucket_permission.BucketPermission
        Updated permission.
    """
    owns_bucket = await CRUDBucketPermission.check_permission(db, bucket.name, current_user.uid, only_own=True)
    if not owns_bucket:
        raise HTTPException(status.HTTP_403_FORBIDDEN, "You can only modify permissions on your own bucket")
    db_permission = await CRUDBucketPermission.get(db, bucket.name, user.uid)
    if db_permission is None:
        raise HTTPException(
            status.HTTP_404_NOT_FOUND,
            detail=f"Permission for combination of bucket={bucket.name} and user={user.uid} doesn't exists",
        )
    updated = await CRUDBucketPermission.update_permission(db, db_permission, permission_parameters)
    updated_schema = PermissionSchema.from_db_model(updated)
    # Rewrite the Ceph bucket policy: drop the statement belonging to the old
    # grant (matched via its Sid hash) and append the refreshed one.
    policy_resource = s3.Bucket(updated_schema.bucket_name).Policy()
    document = json.loads(policy_resource.policy)
    stale_sid = updated_schema.to_hash(user.uid)
    kept_statements = [statement for statement in document["Statement"] if statement["Sid"] != stale_sid]
    document["Statement"] = kept_statements + updated_schema.map_to_bucket_policy_statement(updated.user_id)
    policy_resource.put(Policy=json.dumps(document))
    return updated_schema
@router.get("/", response_model=list[BucketOutSchema], summary="List buckets of user")
async def list_buckets(
    user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
    s3: S3ServiceResource = Depends(get_s3_resource),
) -> list[BucketOutSchema]:
    """
    List the buckets of the current user where the user has READ permissions for.
    \f
    Parameters
    ----------
    user : app.models.user.User
        User for which to retrieve the buckets. Dependency Injection.
    db : sqlalchemy.ext.asyncio.AsyncSession.
        Async database session to perform query on. Dependency Injection.
    s3 : boto3_type_annotations.s3.ServiceResource
        S3 Service to perform operations on buckets in Ceph. Dependency Injection.

    Returns
    -------
    buckets : list[app.schemas.bucket.BucketOut]
        All the buckets for which the user has READ permissions.
    """
    db_buckets = await CRUDBucket.get_for_user(db, user.uid)
    schemas: list[BucketOutSchema] = []
    for db_bucket in db_buckets:
        # The creation timestamp lives only in Ceph, not in our database.
        schemas.append(
            BucketOutSchema(
                description=db_bucket.description,
                name=db_bucket.name,
                created_at=s3.Bucket(name=db_bucket.name).creation_date,
                owner=db_bucket.owner.uid,
            )
        )
    return schemas
@router.delete("/{bucket_name}", status_code=status.HTTP_204_NO_CONTENT, summary="Delete a bucket")
async def delete_bucket(
    force_delete: bool = Query(False, description="Delete even non-empty bucket"),
    bucket: BucketDB = Depends(get_current_bucket),
    user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
    s3: S3ServiceResource = Depends(get_s3_resource),
) -> None:
    """
    Delete a bucket by its name. Only the owner of the bucket can delete the bucket.
    \f
    Parameters
    ----------
    force_delete : bool, default False
        Flag for deleting a non-empty bucket. Query parameter.
    bucket : app.models.bucket.Bucket
        Bucket with the name provided in the URL path. Dependency Injection.
    user : app.models.user.User
        Current user. Dependency Injection.
    db : sqlalchemy.ext.asyncio.AsyncSession.
        Async database session to perform query on. Dependency Injection.
    s3 : boto3_type_annotations.s3.ServiceResource
        S3 Service to perform operations on buckets in Ceph. Dependency Injection.
    """
    if not await CRUDBucketPermission.check_permission(db, bucket.name, user.uid, only_own=True):
        raise HTTPException(status.HTTP_403_FORBIDDEN, "You can only delete your own buckets")
    if force_delete:
        # Empty the bucket first: the S3 delete below refuses non-empty buckets.
        keys = [{"Key": summary.key} for summary in s3.Bucket(bucket.name).objects.all()]
        if keys:
            s3.Bucket(bucket.name).delete_objects(Delete={"Objects": keys})  # type: ignore
    try:
        s3.Bucket(name=bucket.name).delete()
        await CRUDBucket.delete(db, bucket)
    except ClientError:
        # Ceph rejects deleting a bucket that still holds objects.
        raise HTTPException(status.HTTP_400_BAD_REQUEST, detail="Bucket not empty")
@router.get(
    "/{bucket_name}/objects/{object_path:path}",
    response_model=S3ObjectMetaInformation,
    tags=["Object"],
    summary="Get the metadata about a specific object",
)
async def get_bucket_object(
    bucket: BucketDB = Depends(get_current_bucket),
    s3: S3ServiceResource = Depends(get_s3_resource),
    object_path: str = Path(
        ...,
        # Fix: was misspelled 'decsription', so the text never reached the
        # generated OpenAPI documentation.
        description="Name of the object",
        examples={
            "normal": {
                "summary": "Normal file",
                "description": "A normal file in a bucket",
                "value": "test.txt",
            },
            "pseudo-folder": {
                "summary": "Pseudo-folder file",
                "description": "A file in a pseudo folder",
                "value": "pseudo/sub/folder/test.txt",
            },
        },
    ),
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
) -> S3ObjectMetaInformation:
    """
    Get the metadata of a specific object in a bucket.\n
    If the current user's permission is restricted to a file prefix, only
    objects under that prefix are accessible.
    \f
    Parameters
    ----------
    bucket : app.models.bucket.Bucket
        Bucket with the name provided in the URL path. Dependency Injection.
    s3 : boto3_type_annotations.s3.ServiceResource
        S3 Service to perform operations on buckets in Ceph. Dependency Injection.
    object_path : str
        Key of a specific object in the bucket. URL Path Parameter.
    current_user : app.models.user.User
        Current user. Dependency Injection.
    db : sqlalchemy.ext.asyncio.AsyncSession.
        Async database session to perform query on. Dependency Injection.

    Returns
    -------
    objs : app.schemas.bucket.S3ObjectMetaInformation
        Meta information about the specific object in the bucket.
    """
    permission = await CRUDBucketPermission.get(db, bucket.name, current_user.uid)
    # A prefix-restricted grant only covers keys under that prefix; owners
    # and unrestricted grants (no permission row / no prefix) see everything.
    if permission is not None and permission.file_prefix is not None:
        if not object_path.startswith(permission.file_prefix):
            raise HTTPException(status.HTTP_403_FORBIDDEN, detail="No rights for this object.")
    # Keep the try narrow: only the S3 lookup can raise ClientError.
    try:
        obj = s3.ObjectSummary(bucket_name=bucket.name, key=object_path)
        return S3ObjectMetaInformation.from_native_s3_object(obj)
    except ClientError:
        raise HTTPException(status.HTTP_404_NOT_FOUND, detail="Object not found")
+ + Returns + ------- + response : fastapi.responses.RedirectResponse + Redirect response to right OAuth2 endpoint + """ + # Clear session to prevent an overflow + request.session.clear() + # construct absolute url for callback + base_url = str(request.base_url)[:-1] + if settings.SSL_TERMINATION: # pragma: no cover + base_url = "https" + base_url[4:] + redirect_uri = base_url + router.prefix + "/callback" + return await oauth.lifescience.authorize_redirect(request, redirect_uri) + + +@router.get( + "/callback", + response_class=RedirectResponse, + status_code=status.HTTP_302_FOUND, + summary="Life Science Login Callback", + responses={ + status.HTTP_302_FOUND: { + "headers": { + "Set-Cookie": { + "description": "JWT for accessing the API", + "schema": { + "type": "string", + "example": "bearer=fake-jwt-cookie; Domain=localhost; expired=Wed, 05 Jan 2022 " + "09:00:00 GMT; Path=/; SameSite=strict; Secure", + }, + } + } + } + }, +) +async def login_callback( + response: RedirectResponse, + user_info: dict[str, Any] = Depends(get_userinfo_from_access_token), + db: AsyncSession = Depends(get_db), + rgw: RGWAdmin = Depends(get_rgw_admin), +) -> str: + """ + Callback for the Life Science Identity Provider.\n + To start the login process visit the route [login route](/api/auth/login/) + + If the user is already known to the system, then a JWT token will be created and sent via the 'set-cookie' header. + The key for this Cookie is 'bearer'.\n + If the user is new, he will be created and then a JWT token is issued.\n + This JWT has to be sent to all authorized endpoints via the HTTPBearer scheme. + \f + Parameters + ---------- + response : fastapi.responses.RedirectResponse + Response which will hold the JWT cookie. + user_info : dict[str, Any] + Get the userinfo with OAuth2. Dependency Injection. + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. Dependency Injection. 
+ rgw : rgwadmin.RGWAdmin + RGW admin interface to manage Ceph's object store. Dependency Injection. + + Returns + ------- + path : str + Redirect path after successful login. + """ + lifescience_id = ( + user_info["voperson_id"] if isinstance(user_info["voperson_id"], str) else user_info["voperson_id"][0] + ) + uid = lifescience_id.split("@")[0] + user = await CRUDUser.get(db, uid) + if user is None: + new_user = User(uid=uid, display_name=user_info["name"]) + user = await CRUDUser.create(db, new_user) + rgw.create_user(uid=user.uid, max_buckets=-1, display_name=user.display_name) + token = create_access_token(uid) + response.set_cookie( + key="bearer", + value=token, + samesite="strict", + expires=settings.JWT_TOKEN_EXPIRE_MINUTES, + secure=True, + domain=settings.DOMAIN, + ) + return "/" diff --git a/app/api/endpoints/users.py b/app/api/endpoints/users.py new file mode 100644 index 0000000000000000000000000000000000000000..b6f649a832901d12c4b6580af524b0d8d1acc442 --- /dev/null +++ b/app/api/endpoints/users.py @@ -0,0 +1,181 @@ +from fastapi import APIRouter, Depends, HTTPException, Path, status +from rgwadmin import RGWAdmin +from rgwadmin.exceptions import RGWAdminException + +from app.api.dependencies import get_current_user, get_rgw_admin, get_user_by_path_uid +from app.models.user import User as UserDB +from app.schemas.user import S3Key +from app.schemas.user import User as UserSchema + +router = APIRouter(prefix="/users", tags=["User"]) + + +@router.get("/me", response_model=UserSchema, summary="Get the logged in user") +def get_logged_in_user( + current_user: UserDB = Depends(get_current_user), +) -> UserDB: + """ + Return the user associated with the used JWT. + \f + Parameters + ---------- + current_user : app.models.user.User + User from the database associated to the used JWT. Dependency injection. + + Returns + ------- + current_user : app.models.user.User + User associated to used JWT. 
+ """ + return current_user + + +@router.get("/{uid}", response_model=UserSchema, summary="Get a user by its uid") +def get_user(user: UserDB = Depends(get_user_by_path_uid)) -> UserDB: + """ + Return the user with the specific uid. A user can only view himself. + \f + Parameters + ---------- + user : app.models.user.User + User with given uid. Dependency Injection. + Returns + ------- + user : app.models.user.User + User with given uid. + """ + return user + + +@router.get( + "/{uid}/keys", + response_model=list[S3Key], + tags=["Key"], + summary="Get the S3 Access keys from a user", +) +def get_user_keys(rgw: RGWAdmin = Depends(get_rgw_admin), user: UserDB = Depends(get_user_by_path_uid)) -> list[S3Key]: + """ + Get all the S3 Access keys for a specific user. + \f + Parameters + ---------- + rgw : rgwadmin.RGWAdmin + RGW admin interface to manage Ceph's object store. Dependency Injection. + user : app.models.user.User + User with given uid. Dependency Injection. + + Returns + ------- + keys : list(app.schemas.user.S3Key) + All S3 keys from the user. + """ + return [S3Key(**key) for key in rgw.get_user(uid=user.uid, stats=False)["keys"]] + + +@router.post( + "/{uid}/keys", + response_model=S3Key, + tags=["Key"], + summary="Create a Access key for a user", + status_code=status.HTTP_201_CREATED, +) +def create_user_key(rgw: RGWAdmin = Depends(get_rgw_admin), user: UserDB = Depends(get_user_by_path_uid)) -> S3Key: + """ + Create a S3 Access key for a specific user. + \f + Parameters + ---------- + rgw : rgwadmin.RGWAdmin + RGW admin interface to manage Ceph's object store. Dependency Injection. + user : app.models.user.User + User with given uid. Dependency Injection. + + Returns + ------- + key : app.schemas.user.S3Key + Newly created S3 key. 
+ """ + before_keys_set = set( + map( + lambda key: key["access_key"], + rgw.get_user(uid=user.uid, stats=False)["keys"], + ) + ) + # create keys returns all keys for a user including the new one + after_keys = rgw.create_key(uid=user.uid, key_type="s3", generate_key=True) + new_key_id = list(set(map(lambda key: key["access_key"], after_keys)) - before_keys_set)[0] # find ID of the key + index = [key["access_key"] for key in after_keys].index(new_key_id) # find new key by ID + return S3Key(**after_keys[index]) + + +@router.get( + "/{uid}/keys/{access_id}", + response_model=S3Key, + tags=["Key"], + summary="Get a specific S3 Access key from a user", +) +def get_user_key( + access_id: str = Path( + ..., + description="ID of the S3 access key", + example="CRJ6B037V2ZT4U3W17VC", + ), + rgw: RGWAdmin = Depends(get_rgw_admin), + user: UserDB = Depends(get_user_by_path_uid), +) -> S3Key: + """ + Get a specific S3 Access Key for a specific user. + \f + Parameters + ---------- + access_id : str + ID of the requested S3 key. URL Path Parameter. + rgw : rgwadmin.RGWAdmin + RGW admin interface to manage Ceph's object store. Dependency Injection. + user : app.models.user.User + User with given uid. Dependency Injection. + + Returns + ------- + key : app.schemas.user.S3Key + Requested S3 key. 
+ """ + keys = rgw.get_user(uid=user.uid, stats=False)["keys"] + try: + index = [key["access_key"] for key in keys].index(access_id) + return S3Key(**keys[index]) + except ValueError: + raise HTTPException(status.HTTP_404_NOT_FOUND, detail="Key not found") + + +@router.delete( + "/{uid}/keys/{access_id}", + tags=["Key"], + summary="Delete a specific S3 Access key from a user", + status_code=status.HTTP_204_NO_CONTENT, +) +def delete_user_key( + access_id: str = Path( + ..., + description="ID of the S3 access key", + example="CRJ6B037V2ZT4U3W17VC", + ), + rgw: RGWAdmin = Depends(get_rgw_admin), + user: UserDB = Depends(get_user_by_path_uid), +) -> None: + """ + Delete a specific S3 Access key for a specific user. + \f + Parameters + ---------- + access_id : str + ID of the S3 key to delete. URL Path Parameter. + rgw : rgwadmin.RGWAdmin + RGW admin interface to manage Ceph's object store. Dependency Injection. + user : app.models.user.User + User with given uid. Dependency Injection. + """ + try: + rgw.remove_key(access_key=access_id, uid=user.uid) + except RGWAdminException: + raise HTTPException(status.HTTP_404_NOT_FOUND, detail="Key not found") diff --git a/app/api/miscellaneous_endpoints.py b/app/api/miscellaneous_endpoints.py new file mode 100644 index 0000000000000000000000000000000000000000..00fa05b100ea8d9138c23aa3415a4c3904a5694d --- /dev/null +++ b/app/api/miscellaneous_endpoints.py @@ -0,0 +1,49 @@ +import httpx +from fastapi import APIRouter, Depends, HTTPException, status +from sqlalchemy.ext.asyncio import AsyncSession + +from app.api.dependencies import get_db +from app.core.config import settings +from app.schemas.security import ErrorDetail + +miscellaneous_router = APIRouter(include_in_schema=True) + + +@miscellaneous_router.get( + "/health", + tags=["Miscellaneous"], + responses={ + status.HTTP_200_OK: { + "description": "Service Health is OK", + "content": {"application/json": {"example": {"status": "OK"}}}, + }, + 
status.HTTP_500_INTERNAL_SERVER_ERROR: { + "model": ErrorDetail, + "description": "Service Health is not OK", + "content": {"application/json": {"example": {"detail": "Connection to RGW lost"}}}, + }, + }, +) +async def health_check(db: AsyncSession = Depends(get_db)) -> dict[str, str]: + """ + Check the health of the service. + \f + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. Dependency Injection. + + Returns + ------- + response : dict[str, str] + status ok + """ + try: + assert db.is_active + except Exception: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Connection to database lost") + try: + httpx.get(settings.OBJECT_GATEWAY_URI, timeout=3.5) + except Exception: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Connection to RGW lost") + return {"status": "OK"} diff --git a/app/ceph/__init__.py b/app/ceph/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/ceph/rgw.py b/app/ceph/rgw.py new file mode 100644 index 0000000000000000000000000000000000000000..a5a3486d0fe24090401177a12c463146533f40bf --- /dev/null +++ b/app/ceph/rgw.py @@ -0,0 +1,25 @@ +from typing import TYPE_CHECKING + +from boto3 import resource +from rgwadmin import RGWAdmin + +from app.core.config import settings + +if TYPE_CHECKING: + from boto3.resources.base import ServiceResource +else: + ServiceResource = object + +s3_resource: ServiceResource = resource( + service_name="s3", + endpoint_url=settings.OBJECT_GATEWAY_URI, + aws_access_key_id=settings.CEPH_ACCESS_KEY, + aws_secret_access_key=settings.CEPH_SECRET_KEY, + verify=False, +) +rgw = RGWAdmin( + access_key=settings.CEPH_ACCESS_KEY, + secret_key=settings.CEPH_SECRET_KEY, + secure=False, + server=settings.OBJECT_GATEWAY_URI.split("://")[-1], +) diff --git a/app/check_ceph_connection.py b/app/check_ceph_connection.py 
new file mode 100644 index 0000000000000000000000000000000000000000..5106b509fe4a871f79261f9266973bd615638d4d --- /dev/null +++ b/app/check_ceph_connection.py @@ -0,0 +1,36 @@ +import logging + +import httpx +from tenacity import after_log, before_log, retry, stop_after_attempt, wait_fixed + +from app.core.config import settings + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +max_tries = 60 * 3 # 3 minutes +wait_seconds = 2 + + +@retry( + stop=stop_after_attempt(max_tries), + wait=wait_fixed(wait_seconds), + before=before_log(logger, logging.INFO), + after=after_log(logger, logging.WARN), +) +def init() -> None: + try: + httpx.get(settings.OBJECT_GATEWAY_URI, timeout=5.0) + except Exception as e: + logger.error(e) + raise e + + +def main() -> None: + logger.info("Check Ceph connection") + init() + logger.info("Ceph connection established") + + +if __name__ == "__main__": + main() diff --git a/app/check_database_connection.py b/app/check_database_connection.py new file mode 100644 index 0000000000000000000000000000000000000000..7f04075b5f543b3881e12c5ead6b9fa7d76f3a2e --- /dev/null +++ b/app/check_database_connection.py @@ -0,0 +1,36 @@ +import logging + +from db.session import SessionLocal +from tenacity import after_log, before_log, retry, stop_after_attempt, wait_fixed + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +max_tries = 60 * 3 # 3 minutes +wait_seconds = 2 + + +@retry( + stop=stop_after_attempt(max_tries), + wait=wait_fixed(wait_seconds), + before=before_log(logger, logging.INFO), + after=after_log(logger, logging.WARN), +) +def init() -> None: + try: + with SessionLocal() as db: + # Try to create session to check if DB is awake + db.execute("SELECT 1") + except Exception as e: + logger.error(e) + raise e + + +def main() -> None: + logger.info("Initializing DB") + init() + logger.info("DB finished initializing") + + +if __name__ == "__main__": + main() diff --git 
a/app/check_oidc_connection.py b/app/check_oidc_connection.py new file mode 100644 index 0000000000000000000000000000000000000000..cd76d1a20f17bd565362cfe78b884a7564858a59 --- /dev/null +++ b/app/check_oidc_connection.py @@ -0,0 +1,36 @@ +import logging + +import httpx +from tenacity import after_log, before_log, retry, stop_after_attempt, wait_fixed + +from app.core.config import settings + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +max_tries = 60 * 3 # 3 minutes +wait_seconds = 2 + + +@retry( + stop=stop_after_attempt(max_tries), + wait=wait_fixed(wait_seconds), + before=before_log(logger, logging.INFO), + after=after_log(logger, logging.WARN), +) +def init() -> None: + try: + httpx.get(settings.OIDC_BASE_URI + settings.OIDC_META_INFO_PATH, timeout=5.0) + except Exception as e: + logger.error(e) + raise e + + +def main() -> None: + logger.info("Check OIDC Provider connection") + init() + logger.info("OIDC Provider connection established") + + +if __name__ == "__main__": + main() diff --git a/app/core/__init__.py b/app/core/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/core/config.py b/app/core/config.py new file mode 100644 index 0000000000000000000000000000000000000000..28716cf28abc110235330ab9711085d3da2dc45a --- /dev/null +++ b/app/core/config.py @@ -0,0 +1,75 @@ +import secrets +from typing import Any, Dict, List, Optional, Union + +from pydantic import AnyHttpUrl, AnyUrl, BaseSettings, Field, validator + + +def _assemble_db_uri(values: Dict[str, Any], async_flag: bool = True) -> Any: + return AnyUrl.build( + scheme=f"mysql+{'aiomysql' if async_flag else 'pymysql'}", + password=values.get("DB_PASSWORD"), + user=values.get("DB_USER"), + port=str(values.get("DB_PORT")), + host=values.get("DB_HOST"), + path=f"/{values.get('DB_DATABASE') or ''}", + ) + + +class Settings(BaseSettings): + DOMAIN: str = Field("localhost", 
description="Domain of the service.") + SSL_TERMINATION: bool = Field("False", description="Flag if the service runs behind a SSL termination proxy") + API_PREFIX: str = Field("/api", description="Path Prefix for all API endpoints.") + SECRET_KEY: str = Field(secrets.token_urlsafe(32), description="Secret key to sign the JWTs.") + # 60 minutes * 24 hours * 8 days = 8 days + JWT_TOKEN_EXPIRE_MINUTES: int = Field(60 * 24 * 8, description="JWT lifespan in minutes.") + # BACKEND_CORS_ORIGINS is a JSON-formatted list of origins + # e.g: '["http://localhost", "http://localhost:4200", "http://localhost:3000", \ + # "http://localhost:8080", "http://local.dockertoolbox.tiangolo.com"]' + BACKEND_CORS_ORIGINS: List[AnyHttpUrl] = Field([], description="List of all valid CORS origins") + + @validator("BACKEND_CORS_ORIGINS", pre=True) + def assemble_cors_origins(cls, v: Union[str, List[str]]) -> Union[List[str], str]: + if isinstance(v, str) and not v.startswith("["): + return [i.strip() for i in v.split(",")] + elif isinstance(v, (list, str)): + return v + raise ValueError(v) + + DB_HOST: str = Field(..., description="Host of the database.") + DB_USER: str = Field(..., description="Username in the database.") + DB_PASSWORD: str = Field(..., description="Password for the database user.") + DB_DATABASE: str = Field(..., description="Name of the database.") + DB_PORT: int = Field(3306, description="Port of the database.") + SQLALCHEMY_VERBOSE_LOGGER: bool = Field(False, description="Flag whether to print the SQL Queries in the logs.") + SQLALCHEMY_DATABASE_ASYNC_URI: AnyUrl | None = None + + @validator("SQLALCHEMY_DATABASE_ASYNC_URI", pre=True) + def assemble_async_db_connection(cls, v: Optional[str], values: Dict[str, Any]) -> Any: + if isinstance(v, str): + return v + return _assemble_db_uri(values, async_flag=True) + + SQLALCHEMY_DATABASE_NORMAL_URI: AnyUrl | None = None + + @validator("SQLALCHEMY_DATABASE_NORMAL_URI", pre=True) + def assemble_db_connection(cls, v: 
Optional[str], values: Dict[str, Any]) -> Any: + if isinstance(v, str): + return v + return _assemble_db_uri(values, async_flag=False) + + OBJECT_GATEWAY_URI: AnyHttpUrl = Field(..., description="URI of the Ceph Object Gateway.") + CEPH_ACCESS_KEY: str = Field(..., description="Access key for the Ceph Object Gateway with admin privileges.") + CEPH_SECRET_KEY: str = Field(..., description="Secret key for the Ceph Object Gateway with admin privileges.") + + OIDC_CLIENT_SECRET: str = Field(..., description="OIDC Client secret") + OIDC_CLIENT_ID: str = Field(..., description="OIDC Client ID") + OIDC_BASE_URI: AnyHttpUrl = Field(..., description="OIDC Base URI") + OIDC_META_INFO_PATH: str = Field("/.well-known/openid-configuration", description="Path to the OIDC meta data file") + + class Config: + case_sensitive = True + env_file = ".env" + secrets_dir = "/run/secrets" + + +settings = Settings() diff --git a/app/core/security.py b/app/core/security.py new file mode 100644 index 0000000000000000000000000000000000000000..2499c6b01056d4e8f4d85a0a632e68165db40d4e --- /dev/null +++ b/app/core/security.py @@ -0,0 +1,69 @@ +from datetime import datetime, timedelta +from typing import Any, Union + +from authlib.integrations.starlette_client import OAuth +from authlib.jose import JsonWebToken + +from app.core.config import settings + +ISSUER = "proxy_api" +ALGORITHM = "HS256" +jwt = JsonWebToken([ALGORITHM]) + + +def create_access_token(subject: Union[str, Any]) -> str: + """ + Create a JWT access token. + + Parameters + ---------- + subject : Union[str, Any] + The subject in the JWT. + + Returns + ------- + token : str + The generated JWT. 
+ + """ + expire = datetime.utcnow() + timedelta(minutes=settings.JWT_TOKEN_EXPIRE_MINUTES) + to_encode = {"exp": expire, "sub": str(subject), "iss": ISSUER} + encoded_jwt = jwt.encode(header={"alg": ALGORITHM}, payload=to_encode, key=settings.SECRET_KEY) + return encoded_jwt.decode("utf-8") + + +def decode_token(token: str) -> dict[str, str]: + """ + Decode and verify a JWT token. + + Parameters + ---------- + token : str + The JWT to decode. + + Returns + ------- + decoded_token : dict[str, str] + Payload of the decoded token. + """ + claims = jwt.decode( + s=token, + key=settings.SECRET_KEY, + claims_options={ + "iss": {"essential": True}, + "sub": {"essential": True}, + "exp": {"essential": True}, + }, + ) + claims.validate() + return claims + + +oauth = OAuth() +oauth.register( + name="lifescience", + client_id=settings.OIDC_CLIENT_ID, + client_secret=settings.OIDC_CLIENT_SECRET, + server_metadata_url=settings.OIDC_BASE_URI + settings.OIDC_META_INFO_PATH, + client_kwargs={"scope": "openid profile aarc", "code_challenge_method": "S256"}, +) diff --git a/app/crud/__init__.py b/app/crud/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/crud/crud_bucket.py b/app/crud/crud_bucket.py new file mode 100644 index 0000000000000000000000000000000000000000..1f4cb29f53d9f106b19d5c5edfde621e6422b25d --- /dev/null +++ b/app/crud/crud_bucket.py @@ -0,0 +1,156 @@ +from sqlalchemy import func, or_ +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select +from sqlalchemy.orm import joinedload + +from app.models.bucket import Bucket +from app.models.bucket_permission import BucketPermission as BucketPermissionDB +from app.models.bucket_permission import PermissionEnum +from app.schemas.bucket import BucketIn as BucketInSchema + + +class CRUDBucket: + @staticmethod + async def get(db: AsyncSession, bucket_name: str) -> Bucket | None: + """ + Get a bucket by its 
name. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. + bucket_name : str + Name of the Bucket to get from database. + + Returns + ------- + bucket : Bucket | None + Returns the bucket if it exists, None otherwise. + """ + stmt = select(Bucket).where(Bucket.name == bucket_name) + row = await db.execute(stmt) + return row.scalar() + + @staticmethod + async def get_for_user(db: AsyncSession, uid: str) -> list[Bucket]: + """ + Get all buckets where the given user has READ permissions for. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession + Async database session to perform query on. + uid : str + UID of a user. + + Returns + ------- + buckets : list[app.models.bucket.Bucket] + A list of all buckets where the given user has READ permissions for. + + Notes + ----- + Creates this SQL Query + SELECT bucket.name, bucket.description, bucket.public, bucket.owner_id, + user_1.uid, user_1.display_name + FROM bucket LEFT OUTER JOIN user AS user_1 ON user_1.uid = bucket.owner_id + WHERE bucket.owner_id = %s OR (EXISTS + (SELECT 1 FROM bucketpermission + WHERE bucket.name = bucketpermission.bucket_name AND bucketpermission.user_id = %s + AND(bucketpermission.permissions = %s OR bucketpermission.permissions = %s) + AND(datediff(now(), bucketpermission.`from`) <= %s OR bucketpermission.`from` IS NULL) + AND(datediff(now(), bucketpermission.`to`) >= %s OR bucketpermission.`to` IS NULL))) + """ + stmt = ( + select(Bucket) + .options(joinedload(Bucket.owner)) + .where( + or_( + Bucket.owner_id == uid, + Bucket.permissions.any(BucketPermissionDB.user_id == uid) + .where( + or_( + BucketPermissionDB.permissions == PermissionEnum.READ, + BucketPermissionDB.permissions == PermissionEnum.READWRITE, + ) + ) + .where( + or_( + func.datediff(func.now(), BucketPermissionDB.from_) >= 0, + BucketPermissionDB.from_ == None, # noqa:E711 + ) + ) + .where( + or_( + func.datediff(func.now(), 
BucketPermissionDB.to) <= 0, + BucketPermissionDB.to == None, # noqa:E711 + ) + ), + ) + ) + ) + + buckets = (await db.execute(stmt)).scalars().all() + return buckets + + @staticmethod + async def get_own_buckets(db: AsyncSession, uid: str) -> list[Bucket]: + """ + Get all the buckets where the user is the owner of it. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession + Async database session to perform query on. + uid : str + UID of the user to get the buckets for. + + Returns + ------- + buckets : list[app.models.bucket.Bucket] + All the buckets for the given UID. + """ + stmt = select(Bucket).where(Bucket.owner_id == uid) + return (await db.execute(stmt)).scalars().all() + + @staticmethod + async def create(db: AsyncSession, bucket_in: BucketInSchema, uid: str) -> Bucket | None: + """ + Create a bucket for a given user. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession + Async database session to perform query on. + bucket_in : app.schemas.bucket.BucketIn + All relevant information for a new bucket. + uid : str + UID of the owner for the new bucket. + + Returns + ------- + bucket : app.models.bucket.Bucket | None + Returns the created bucket. If None then there was a problem, e.g. the name of the bucket is already taken. + """ + bucket = Bucket(**bucket_in.dict(), owner_id=uid) + if await CRUDBucket.get(db, bucket.name) is None: + db.add(bucket) + await db.commit() + await db.refresh(bucket) + return bucket + return None + + @staticmethod + async def delete(db: AsyncSession, bucket: Bucket) -> None: + """ + Delete a specific bucket. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession + Async database session to perform query on. + bucket : app.models.bucket.Bucket + The bucket to delete. 
+ """ + await db.delete(bucket) + await db.commit() diff --git a/app/crud/crud_bucket_permission.py b/app/crud/crud_bucket_permission.py new file mode 100644 index 0000000000000000000000000000000000000000..2b0a47b0dea84fc9685419c0a51238aca45c5139 --- /dev/null +++ b/app/crud/crud_bucket_permission.py @@ -0,0 +1,188 @@ +from sqlalchemy import and_ +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select +from sqlalchemy.orm import joinedload + +from app.crud.crud_bucket import CRUDBucket +from app.crud.crud_user import CRUDUser +from app.models.bucket_permission import BucketPermission as BucketPermissionDB +from app.schemas.bucket_permission import BucketPermission as BucketPermissionSchema +from app.schemas.bucket_permission import BucketPermissionParameters as BucketPermissionParametersSchema + + +class CRUDBucketPermission: + @staticmethod + async def get(db: AsyncSession, bucket_name: str, user_id: str) -> BucketPermissionDB | None: + stmt = select(BucketPermissionDB).where( + and_(BucketPermissionDB.user_id == user_id, BucketPermissionDB.bucket_name == bucket_name) + ) + row = await db.execute(stmt) + return row.scalar() + + @staticmethod + async def get_permissions_for_bucket(db: AsyncSession, bucket_name: str) -> list[BucketPermissionDB]: + """ + Get the permissions for the given bucket. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. + bucket_name : str + Name of the bucket which to query. + + Returns + ------- + buckets : list[BucketPermission] + Returns the permissions for the given bucket. 
+ """ + stmt = ( + select(BucketPermissionDB) + .options(joinedload(BucketPermissionDB.grantee)) + .where(BucketPermissionDB.bucket_name == bucket_name) + ) + row = await db.execute(stmt) + return row.scalars().all() + + @staticmethod + async def get_permissions_for_user(db: AsyncSession, user_id: str) -> list[BucketPermissionDB]: + """ + Get the permissions for the given user. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. + user_id : str + UID of the user which to query. + + Returns + ------- + buckets : list[BucketPermission] + Returns the permissions for the given user. + """ + stmt = select(BucketPermissionDB).where(BucketPermissionDB.user_id == user_id) + row = await db.execute(stmt) + return row.scalars().all() + + @staticmethod + async def check_permission(db: AsyncSession, bucket_name: str, uid: str, only_own: bool = False) -> bool: + """ + Check if the provided user has READ permission to the provided bucket. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession + Async database session to perform query on. + bucket_name : str + Name of the bucket for which to perform the check. + uid : str + UID of the user for which to perform the check. + only_own : bool, default False + Flag if the check is only for the users own buckets or include foreign buckets with permission. + + Returns + ------- + permission_check : bool + Return True if the user has READ permission on the bucket, False otherwise. + """ + buckets = await (CRUDBucket.get_own_buckets(db, uid) if only_own else CRUDBucket.get_for_user(db, uid)) + return bucket_name in map(lambda x: x.name, buckets) + + @staticmethod + async def create(db: AsyncSession, permission: BucketPermissionSchema) -> BucketPermissionDB: + """ + Create a permission in the database and raise Exceptions if there are problems. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession + Async database session to perform query on. 
+ permission : app.schemas.bucket_permission.BucketPermission + The permission to create. + Returns + ------- + permission : app.models.bucket_permission.BucketPermission + Newly created permission model from the db. + """ + # Check if user exists + user = await CRUDUser.get(db, uid=permission.uid) + if user is None: + raise KeyError( + f"Unknown user with uid {permission.uid}", + ) + # Check that grantee is not the owner of the bucket + bucket = await CRUDBucket.get(db, permission.bucket_name) + if bucket is None or bucket.owner_id == user.uid: + raise ValueError(f"User {permission.uid} is the owner of the bucket {permission.bucket_name}") + # Check if combination of user and bucket already exists + duplicate_check_stmt = select(BucketPermissionDB).where( + and_(BucketPermissionDB.user_id == user.uid, BucketPermissionDB.bucket_name == permission.bucket_name) + ) + previous_permission = (await db.execute(duplicate_check_stmt)).scalar() + if previous_permission is not None: + raise DuplicateError( + f"bucket permission for combination {permission.bucket_name} {permission.uid} already exists." + ) + # Add permission to db + permission_db = BucketPermissionDB( + user_id=user.uid, + bucket_name=permission.bucket_name, + from_=permission.from_timestamp, + to=permission.to_timestamp, + file_prefix=permission.file_prefix, + permissions=permission.permission, + ) + db.add(permission_db) + await db.commit() + await db.refresh(permission_db) + return permission_db + + @staticmethod + async def delete(db: AsyncSession, permission: BucketPermissionDB) -> None: + """ + Delete a permission in the database. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession + Async database session to perform query on. + permission : app.schemas.bucket_permission.BucketPermission + The permission to create. + Returns + ------- + permission : app.models.bucket_permission.BucketPermission + Newly created permission model from the db. 
+ """ + await db.delete(permission) + await db.commit() + + @staticmethod + async def update_permission( + db: AsyncSession, permission: BucketPermissionDB, new_params: BucketPermissionParametersSchema + ) -> BucketPermissionDB: + """ + Update a permission in the database. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession + Async database session to perform query on. + permission : app.schemas.bucket_permission.BucketPermission + The permission to update. + new_params : app.schemas.bucket_permission.BucketPermissionParameters + The parameters which should be updated. + + Returns + ------- + permission : app.models.bucket_permission.BucketPermission + Updated permission model from the db. + """ + permission.update_parameters(new_params) + await db.commit() + await db.refresh(permission) + return permission + + +class DuplicateError(Exception): + pass diff --git a/app/crud/crud_user.py b/app/crud/crud_user.py new file mode 100644 index 0000000000000000000000000000000000000000..4ae8eca4acf2d5b7086dfd0b03e5e6ad6ff05245 --- /dev/null +++ b/app/crud/crud_user.py @@ -0,0 +1,48 @@ +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.models.user import User + + +class CRUDUser: + @staticmethod + async def create(db: AsyncSession, user: User) -> User: + """ + Create a new user in the database. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. + user : app.models.user.User + The user to create. + + Returns + ------- + user : app.models.user.User + The newly created user. + """ + db.add(user) + await db.commit() + await db.refresh(user) + return user + + @staticmethod + async def get(db: AsyncSession, uid: str) -> User | None: + """ + Get a user by its UID. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. + uid : str + UID of a user. 
+ + Returns + ------- + user : app.models.user.User | None + The user for the given UID if he exists, None otherwise + """ + stmt = select(User).where(User.uid == uid) + return (await db.execute(stmt)).scalar() diff --git a/app/db/__init__.py b/app/db/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/db/base.py b/app/db/base.py new file mode 100644 index 0000000000000000000000000000000000000000..92a999350a7d18f09ee8872382f5d40913156ba4 --- /dev/null +++ b/app/db/base.py @@ -0,0 +1,6 @@ +# Import all the models, so that Base has them before being +# imported by Alembic +from app.db.base_class import Base # noqa +from app.models.bucket import Bucket # noqa +from app.models.bucket_permission import BucketPermission # noqa +from app.models.user import User # noqa diff --git a/app/db/base_class.py b/app/db/base_class.py new file mode 100644 index 0000000000000000000000000000000000000000..59be70308cbefd11f1c259799bffc030cac717f0 --- /dev/null +++ b/app/db/base_class.py @@ -0,0 +1,3 @@ +from sqlalchemy.orm import declarative_base + +Base = declarative_base() diff --git a/app/db/session.py b/app/db/session.py new file mode 100644 index 0000000000000000000000000000000000000000..d5f7127d05fbd43f126095132241b997e4f9abbd --- /dev/null +++ b/app/db/session.py @@ -0,0 +1,11 @@ +from sqlalchemy import create_engine +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker + +from app.core.config import settings + +engine = create_engine(str(settings.SQLALCHEMY_DATABASE_NORMAL_URI), pool_pre_ping=True) +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + +engineAsync = create_async_engine(settings.SQLALCHEMY_DATABASE_ASYNC_URI, echo=settings.SQLALCHEMY_VERBOSE_LOGGER) +SessionAsync = sessionmaker(engineAsync, expire_on_commit=False, class_=AsyncSession, future=True) diff --git a/app/main.py b/app/main.py new file 
mode 100644 index 0000000000000000000000000000000000000000..59d3135840a832a943f4945da3a172899a5c293f --- /dev/null +++ b/app/main.py @@ -0,0 +1,62 @@ +import uvicorn +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from fastapi.middleware.gzip import GZipMiddleware +from fastapi.responses import RedirectResponse +from fastapi.routing import APIRoute +from starlette.middleware.sessions import SessionMiddleware + +from app.api.api import api_router +from app.api.miscellaneous_endpoints import miscellaneous_router +from app.core.config import settings + +description = """ +This is the backend for a new UI which can leverage the additional powerful functionality provided by Ceph in a +user-friendly manner. +""" + + +def custom_generate_unique_id(route: APIRoute) -> str: + return f"{route.tags[-1]}-{route.name}" + + +app = FastAPI( + title="S3-Proxy", + version="1.0.0", + description=description, + contact={ + "name": "Daniel Goebel", + "url": "https://ekvv.uni-bielefeld.de/pers_publ/publ/PersonDetail.jsp?personId=223066601", + "email": "dgoebel@techfak.uni-bielefeld.de", + }, + generate_unique_id_function=custom_generate_unique_id, + # license_info={"name": "MIT", "url": "https://mit-license.org/"}, + root_path=settings.API_PREFIX, +) + +# CORS Settings for the API +app.add_middleware( + CORSMiddleware, + allow_origins=settings.BACKEND_CORS_ORIGINS, + allow_credentials=False, + allow_methods=["*"], + allow_headers=["*"], +) + +# Enable gzip compression for large responses +app.add_middleware(GZipMiddleware, minimum_size=500) + +# Include all routes +app.include_router(api_router) +app.include_router(miscellaneous_router) + +app.add_middleware(SessionMiddleware, secret_key=settings.SECRET_KEY) + + +@app.get("/", response_class=RedirectResponse, tags=["Miscellaneous"], include_in_schema=False) +def redirect_docs() -> str: + return settings.API_PREFIX + "/docs" + + +if __name__ == "__main__": + uvicorn.run(app, host="0.0.0.0", port=8000) 
diff --git a/app/models/__init__.py b/app/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/models/bucket.py b/app/models/bucket.py new file mode 100644 index 0000000000000000000000000000000000000000..6c2f0f5f7b0bb0cd2a0a80afd4dc80ba82b53d16 --- /dev/null +++ b/app/models/bucket.py @@ -0,0 +1,38 @@ +from typing import TYPE_CHECKING, Any + +from sqlalchemy import Boolean, Column, ForeignKey, String +from sqlalchemy.dialects.mysql import TEXT +from sqlalchemy.orm import relationship + +from app.db.base_class import Base + +if TYPE_CHECKING: + from .bucket_permission import BucketPermission + from .user import User + + +class Bucket(Base): + """ + Database model for a bucket. + """ + + __tablename__: str = "bucket" + name: str = Column(String(63), primary_key=True, index=True, unique=True) + description: str = Column(TEXT, nullable=False) + public: bool = Column(Boolean(), default=False, server_default="0", nullable=False) + owner_id: str = Column(ForeignKey("user.uid"), nullable=True) + owner: "User" = relationship("User", back_populates="buckets") + permissions: list["BucketPermission"] = relationship( + "BucketPermission", + back_populates="bucket", + cascade="all, delete", + passive_deletes=True, + ) + + __mapper_args__ = {"eager_defaults": True} + + def __eq__(self, other: Any) -> bool: + return self.name == other.name if isinstance(other, Bucket) else False + + def __repr__(self) -> str: + return f"Bucket(name='{self.name}', owner='{self.owner_id}')" diff --git a/app/models/bucket_permission.py b/app/models/bucket_permission.py new file mode 100644 index 0000000000000000000000000000000000000000..7b26d80502d8ed1330de9f0ed292f3fd934d9801 --- /dev/null +++ b/app/models/bucket_permission.py @@ -0,0 +1,62 @@ +from datetime import datetime +from enum import Enum, unique +from typing import TYPE_CHECKING + +from sqlalchemy import Column, ForeignKey, String +from 
sqlalchemy.dialects.mysql import ENUM, TIMESTAMP +from sqlalchemy.orm import relationship + +from app.db.base_class import Base + +if TYPE_CHECKING: + from app.schemas.bucket_permission import BucketPermissionParameters + + from .bucket import Bucket + from .user import User +else: + BucketPermissionParameters = object + + +@unique +class PermissionEnum(str, Enum): + """ + Enumeration for the possible permission on a bucket. + """ + + READ: str = "READ" + WRITE: str = "WRITE" + READWRITE: str = "READWRITE" + + +class BucketPermission(Base): + """ + Database model for the permission for a user on a bucket. + Will be deleted if either the user or the bucket is deleted. + """ + + __tablename__: str = "bucketpermission" + user_id: str = Column(ForeignKey("user.uid", ondelete="CASCADE"), primary_key=True) + bucket_name: str = Column(ForeignKey("bucket.name", ondelete="CASCADE"), primary_key=True) + from_: datetime | None = Column("from", TIMESTAMP, nullable=True) + to: datetime | None = Column(TIMESTAMP, nullable=True) + file_prefix: str | None = Column(String(512), nullable=True) + permissions: str | PermissionEnum = Column(ENUM(PermissionEnum), default=PermissionEnum.READ, nullable=False) + grantee: "User" = relationship("User", back_populates="permissions") + bucket: "Bucket" = relationship("Bucket", back_populates="permissions") + + def update_parameters(self, params: BucketPermissionParameters) -> None: # pragma: no cover + """ + Update the object with the new parameters. + + Parameters + ---------- + params : app.schemas.bucket_permission.BucketPermissionParameters + The parameters which should be updated. 
+ """ + self.from_ = params.from_timestamp + self.to = params.to_timestamp + self.file_prefix = params.file_prefix + self.permissions = params.permission + + def __repr__(self) -> str: + return f"BucketPermission(uid={self.user_id} bucket_name={self.bucket_name})" diff --git a/app/models/user.py b/app/models/user.py new file mode 100644 index 0000000000000000000000000000000000000000..b32f8d939017f69224dec5a4b0f3289f7dda0675 --- /dev/null +++ b/app/models/user.py @@ -0,0 +1,28 @@ +from typing import TYPE_CHECKING, Any + +from sqlalchemy import Column, String +from sqlalchemy.orm import relationship + +from app.db.base_class import Base + +if TYPE_CHECKING: + from .bucket import Bucket + from .bucket_permission import BucketPermission + + +class User(Base): + """ + Database model for a user. + """ + + __tablename__: str = "user" + uid: str = Column(String(64), primary_key=True, index=True, unique=True) + display_name: str | None = Column(String(256), nullable=True) + buckets: list["Bucket"] = relationship("Bucket", back_populates="owner") + permissions: list["BucketPermission"] = relationship("BucketPermission", back_populates="grantee") + + def __eq__(self, other: Any) -> bool: + return self.uid == other.uid if isinstance(other, User) else False + + def __repr__(self) -> str: + return f"'User(uid={self.uid}', display_name='{self.display_name}')" diff --git a/app/schemas/__init__.py b/app/schemas/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/schemas/bucket.py b/app/schemas/bucket.py new file mode 100644 index 0000000000000000000000000000000000000000..7f1109e1c1cce4641cc4950faad164f509cae0d4 --- /dev/null +++ b/app/schemas/bucket.py @@ -0,0 +1,100 @@ +from datetime import datetime +from typing import TYPE_CHECKING + +from pydantic import BaseModel, Field + +if TYPE_CHECKING: + from mypy_boto3_s3.service_resource import ObjectSummary +else: + ObjectSummary = object + + +class 
_BaseBucket(BaseModel): + """ + Base Schema for a bucket. + """ + + name: str = Field( + ..., + example="test-bucket", + description="Name of the bucket", + min_length=3, + max_length=63, + regex=r"(?!(^((2(5[0-5]|[0-4]\d)|[01]?\d{1,2})\.){3}(2(5[0-5]|[0-4]\d)|[01]?\d{1,2})$))^[a-z\d][a-z\d.-]{1,61}[a-z\d]$", # noqa:E501 + ) + description: str = Field( + ..., + example="""\ +This is a very long sample description of a bucket and its purpose which has to be more \ +than 126 characters long and is mandatory. + """.strip(), + description="Description of the bucket", + min_length=126, + max_length=2**16, + ) + + +class BucketIn(_BaseBucket): + """ + Schema for creating a new bucket. + """ + + +class BucketOut(_BaseBucket): + """ + Schema for answering a request with a bucket. + """ + + created_at: datetime = Field( + ..., + example=datetime(2022, 1, 1, 0, 0), + description="Time when the bucket was created", + ) + owner: str = Field(..., description="UID of the owner", example="28c5353b8bb34984a8bd4169ba94c606") + + class Config: + orm_mode = True + + +class S3ObjectMetaInformation(BaseModel): + """ + Schema for the meta-information about a S3 object. + """ + + key: str = Field( + ..., + description="Key of the Object in the S3 store", + example="test.txt", + max_length=512, + ) + bucket: str = Field( + ..., + description="Name of the Bucket in which the object is", + example="test-bucket", + max_length=256, + ) + size: int = Field(..., description="Size of the object in Bytes", example=123456) + last_modified: datetime = Field( + ..., + description="Last time the object was modified", + example=datetime(2022, 1, 1, 0, 0), + ) + + @staticmethod + def from_native_s3_object(obj: ObjectSummary) -> "S3ObjectMetaInformation": # pragma: no cover + """ + Create an S3ObjectMetaInformation object from a boto3 S3ObjectSummary object. + + Parameters + ---------- + obj : mypy_boto3_s3.service_resource.ObjectSummary + boto3 S3ObjectSummary. 
+ + Returns + ------- + obj : app.schemas.bucket.S3ObjectMetaInformation + Converted S3objectMetaInformation. + """ + return S3ObjectMetaInformation( + key=obj.key, bucket=obj.bucket_name, size=obj.size, last_modified=obj.last_modified + ) diff --git a/app/schemas/bucket_permission.py b/app/schemas/bucket_permission.py new file mode 100644 index 0000000000000000000000000000000000000000..4378ac0a4929f5bad34ed947281557345a5424fb --- /dev/null +++ b/app/schemas/bucket_permission.py @@ -0,0 +1,129 @@ +import hashlib +from datetime import datetime +from typing import Any + +from pydantic import BaseModel, Field + +from app.models.bucket_permission import BucketPermission as BucketPermissionDB +from app.models.bucket_permission import PermissionEnum + + +class BucketPermissionParameters(BaseModel): + """ + Schema for the parameters of a bucket permission. + """ + + from_timestamp: datetime | None = Field( + None, description="Start date of permission", example=datetime(2022, 1, 1, 0, 0) + ) + to_timestamp: datetime | None = Field( + None, description="End date of permission", example=datetime(2023, 1, 1, 0, 0) + ) + file_prefix: str | None = Field(None, description="Prefix of subfolder", example="pseudo/sub/folder/") + permission: PermissionEnum | str = Field(PermissionEnum.READ, description="Permission", example=PermissionEnum.READ) + + +class BucketPermission(BucketPermissionParameters): + """ + Schema for the bucket permissions. + """ + + uid: str = Field(..., description="UID of the grantee", example="28c5353b8bb34984a8bd4169ba94c606") + bucket_name: str = Field(..., description="Name of Bucket", example="test-bucket") + + @staticmethod + def from_db_model(permission: BucketPermissionDB, uid: str | None = None) -> "BucketPermission": + """ + Create a bucket permission schema from the database model. + + Parameters + ---------- + permission : app.models.bucket_permission.BucketPermission + DB model for the permission. 
+ uid : str | None, default None + Sets the uid in the schema. If None it will be taken from the database model. + + Returns + ------- + permission_schema : app.schemas.bucket_permission.BucketPermission + Schema populated with the values from the database model. + """ + return BucketPermission( + uid=uid if uid else permission.grantee.uid, + bucket_name=permission.bucket_name, + from_timestamp=permission.from_, + to_timestamp=permission.to, + file_prefix=permission.file_prefix, + permission=permission.permissions, + ) + + def to_hash(self, user_id: str) -> str: + """ + Combine the bucket name and user id and produce the MD5 hash of it. + + Parameters + ---------- + user_id : str + The unique and unchanging user id + + Returns + ------- + hash : str + The resulting MD5 hash. + """ + str_for_id_hash = self.bucket_name + user_id + return hashlib.md5(str_for_id_hash.encode("utf-8")).hexdigest() + + def map_to_bucket_policy_statement(self, user_id: str) -> list[dict[str, Any]]: + """ + Create a bucket policy statement from the schema and the user_id.\n + The Sid is unique for every bucket and user combination. + + Parameters + ---------- + user_id : str + The unique and unchanging user id belonging to this permission. + + Returns + ------- + statements : list[dict[str, Any]] + Bucket and object permission statements. 
+ """ + obj_policy: dict[str, Any] = { + "Sid": self.to_hash(user_id), + "Effect": "Allow", + "Principal": {"AWS": f"arn:aws:iam:::user/{self.uid}"}, + "Resource": f"arn:aws:s3:::{self.bucket_name}/{'' if self.file_prefix is None else self.file_prefix}*", + "Action": [], + "Condition": {}, + } + bucket_policy: dict[str, Any] = { + "Sid": self.to_hash(user_id), + "Effect": "Allow", + "Principal": {"AWS": f"arn:aws:iam:::user/{self.uid}"}, + "Resource": f"arn:aws:s3:::{self.bucket_name}", + "Action": [], + "Condition": {}, + } + if self.permission == PermissionEnum.READ or self.permission == PermissionEnum.READWRITE: + bucket_policy["Action"] += ["s3:ListBucket"] + obj_policy["Action"] += ["s3:GetObject"] + if self.permission == PermissionEnum.WRITE or self.permission == PermissionEnum.READWRITE: + obj_policy["Action"] += ["s3:DeleteObject", "s3:PutObject"] + if self.to_timestamp is not None: + obj_policy["Condition"]["DateLessThan"] = { + "aws:CurrentTime": self.to_timestamp.strftime("%Y-%m-%dT%H:%M:%SZ") + } + bucket_policy["Condition"]["DateLessThan"] = obj_policy["Condition"]["DateLessThan"] + if self.from_timestamp is not None: + obj_policy["Condition"]["DateGreaterThan"] = { + "aws:CurrentTime": self.from_timestamp.strftime("%Y-%m-%dT%H:%M:%SZ") + } + bucket_policy["Condition"]["DateGreaterThan"] = obj_policy["Condition"]["DateGreaterThan"] + if self.file_prefix is not None: + bucket_policy["Condition"]["StringLike"] = {"s3:prefix": self.file_prefix + "*"} + if len(bucket_policy["Condition"]) == 0: + del bucket_policy["Condition"] + if len(obj_policy["Condition"]) == 0: + del obj_policy["Condition"] + return [obj_policy] if self.permission == PermissionEnum.WRITE else [obj_policy, bucket_policy] diff --git a/app/schemas/security.py b/app/schemas/security.py new file mode 100644 index 0000000000000000000000000000000000000000..e2328cb056603a3f1c7f955f1b5dfea8bbd62da5 --- /dev/null +++ b/app/schemas/security.py @@ -0,0 +1,20 @@ +from datetime import datetime + 
+from pydantic import BaseModel, Field + + +class JWTToken(BaseModel): + """ + Schema for a JWT. Only for convenience + """ + + exp: datetime + sub: str + + +class ErrorDetail(BaseModel): + """ + Schema for a error due to a rejected request. + """ + + detail: str = Field(..., description="Detail about the occurred error") diff --git a/app/schemas/user.py b/app/schemas/user.py new file mode 100644 index 0000000000000000000000000000000000000000..950f3d73d42429db4c7bf8d30385787ed79a02e8 --- /dev/null +++ b/app/schemas/user.py @@ -0,0 +1,37 @@ +from pydantic import BaseModel, Field + + +class User(BaseModel): + """ + Schema for a user. + """ + + uid: str = Field( + ..., + description="ID of the user", + example="28c5353b8bb34984a8bd4169ba94c606", + max_length=64, + ) + display_name: str = Field( + ..., + description="Full Name of the user", + example="Bilbo Baggins", + max_length=256, + ) + + class Config: + orm_mode = True + + +class S3Key(BaseModel): + """ + Schema for a S3 key associated with a user. 
+ """ + + user: str = Field(..., description="UID of the user of that access key", example="28c5353b8bb34984a8bd4169ba94c606") + access_key: str = Field(..., description="ID of the S3 access key", example="CRJ6B037V2ZT4U3W17VC") + secret_key: str = Field( + ..., + description="Secret of the S3 access key", + example="2F5uNTI1qvt4oAroXV0wWct8rWclL2QvFXKqSqjS", + ) diff --git a/app/tests/__init__.py b/app/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/tests/api/__init__.py b/app/tests/api/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/tests/api/test_bucket_permissions.py b/app/tests/api/test_bucket_permissions.py new file mode 100644 index 0000000000000000000000000000000000000000..261fa86cdb93ff06daab2c66bf8e3fe12cff493d --- /dev/null +++ b/app/tests/api/test_bucket_permissions.py @@ -0,0 +1,615 @@ +import json +from datetime import datetime, timedelta + +import pytest +from fastapi import status +from httpx import AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession + +from app.models.bucket import Bucket +from app.models.bucket_permission import PermissionEnum +from app.models.user import User +from app.schemas.bucket_permission import BucketPermission as BucketPermissionSchema +from app.schemas.bucket_permission import BucketPermissionParameters as BucketPermissionParametersSchema +from app.tests.utils.bucket import add_permission_for_bucket +from app.tests.utils.user import get_authorization_headers +from app.tests.utils.utils import json_datetime_converter + + +class _TestBucketPermissionRoutes: + base_path = "/permissions/" + + +class TestBucketPermissionRoutesGet(_TestBucketPermissionRoutes): + @pytest.mark.asyncio + async def test_get_valid_bucket_permission( + self, + client: AsyncClient, + user_token_headers: dict[str, str], + random_bucket_permission_schema: 
BucketPermissionSchema, + ) -> None: + """ + Test for getting a valid bucket permission. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + """ + response = await client.get( + f"{self.base_path}bucket/{random_bucket_permission_schema.bucket_name}/user/{random_bucket_permission_schema.uid}", # noqa:E501 + headers=user_token_headers, + ) + assert response.status_code == status.HTTP_200_OK + + permission = response.json() + + assert permission + assert permission["uid"] == random_bucket_permission_schema.uid + assert permission["bucket_name"] == random_bucket_permission_schema.bucket_name + + @pytest.mark.asyncio + async def test_get_bucket_permission_for_unknown_user( + self, + client: AsyncClient, + user_token_headers: dict[str, str], + random_bucket_permission_schema: BucketPermissionSchema, + ) -> None: + """ + Test for getting a bucket permission for an unknown user. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. 
+ """ + response = await client.get( + f"{self.base_path}bucket/{random_bucket_permission_schema.bucket_name}/user/ImpossibleUser", + headers=user_token_headers, + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + @pytest.mark.asyncio + async def test_get_unknown_bucket_permission( + self, client: AsyncClient, user_token_headers: dict[str, str], random_bucket: Bucket, random_second_user: User + ) -> None: + """ + Test for getting a bucket permission for an unknown user/bucket combination. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. + """ + response = await client.get( + f"{self.base_path}bucket/{random_bucket.name}/user/{random_second_user.uid}", + headers=user_token_headers, + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + @pytest.mark.asyncio + async def test_get_foreign_bucket_permission_with_permission( + self, client: AsyncClient, random_bucket_permission_schema: BucketPermissionSchema, random_second_user: User + ) -> None: + """ + Test for getting a bucket permission for a foreign bucket with READ permission for that bucket. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. 
+ """ + user_token_headers = get_authorization_headers(random_second_user.uid) + response = await client.get( + f"{self.base_path}bucket/{random_bucket_permission_schema.bucket_name}/user/{random_second_user.uid}", + headers=user_token_headers, + ) + assert response.status_code == status.HTTP_200_OK + permission = response.json() + + assert permission + assert permission["uid"] == random_bucket_permission_schema.uid + assert permission["bucket_name"] == random_bucket_permission_schema.bucket_name + + @pytest.mark.asyncio + async def test_get_wrong_bucket_permission_with_permission( + self, + db: AsyncSession, + client: AsyncClient, + random_bucket_permission_schema: BucketPermissionSchema, + random_third_user: User, + ) -> None: + """ + Test for getting a bucket permission as a grantee for another grantee for the bucket. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + random_third_user : app.models.user.User + Random third user who has no permissions for the bucket. pytest fixture. 
+ """ + await add_permission_for_bucket(db, random_bucket_permission_schema.bucket_name, random_third_user.uid) + user_token_headers = get_authorization_headers(random_third_user.uid) + response = await client.get( + f"{self.base_path}bucket/{random_bucket_permission_schema.bucket_name}/user/{random_bucket_permission_schema.uid}", # noqa:E501 + headers=user_token_headers, + ) + assert response.status_code == status.HTTP_403_FORBIDDEN + + @pytest.mark.asyncio + async def test_get_bucket_permissions_for_user( + self, client: AsyncClient, random_second_user: User, random_bucket_permission_schema: BucketPermissionSchema + ) -> None: + """ + Test for getting all bucket permission for a user. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. + random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + """ + user_token_headers = get_authorization_headers(random_second_user.uid) + response = await client.get( + f"{self.base_path}user/{random_bucket_permission_schema.uid}", + headers=user_token_headers, + ) + assert response.status_code == status.HTTP_200_OK + permission_list = response.json() + assert isinstance(permission_list, list) + assert len(permission_list) == 1 + permission = permission_list[0] + assert permission["uid"] == random_bucket_permission_schema.uid + assert permission["bucket_name"] == random_bucket_permission_schema.bucket_name + + @pytest.mark.asyncio + async def test_get_bucket_permissions_for_bucket( + self, + client: AsyncClient, + user_token_headers: dict[str, str], + random_bucket_permission_schema: BucketPermissionSchema, + ) -> None: + """ + Test for getting all bucket permission for a bucket. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. 
pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + """ + response = await client.get( + f"{self.base_path}bucket/{random_bucket_permission_schema.bucket_name}", + headers=user_token_headers, + ) + assert response.status_code == status.HTTP_200_OK + permission_list = response.json() + assert isinstance(permission_list, list) + assert len(permission_list) == 1 + permission = permission_list[0] + assert permission["uid"] == random_bucket_permission_schema.uid + assert permission["bucket_name"] == random_bucket_permission_schema.bucket_name + + @pytest.mark.asyncio + async def test_get_bucket_permissions_for_foreign_bucket( + self, client: AsyncClient, random_second_user: User, random_bucket_permission_schema: BucketPermissionSchema + ) -> None: + """ + Test for getting all bucket permissions for a foreign bucket with READ permission for that bucket. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. + random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. 
+ """ + user_token_headers = get_authorization_headers(random_second_user.uid) + response = await client.get( + f"{self.base_path}bucket/{random_bucket_permission_schema.bucket_name}", + headers=user_token_headers, + ) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +class TestBucketPermissionRoutesCreate(_TestBucketPermissionRoutes): + @pytest.mark.asyncio + async def test_create_bucket_permissions_for_unknown_user( + self, client: AsyncClient, user_token_headers: dict[str, str], random_bucket: Bucket + ) -> None: + """ + Test for creating a bucket permission for an unknown user. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + """ + permission = BucketPermissionSchema(bucket_name=random_bucket.name, uid="ImpossibleUser") + response = await client.post(self.base_path, headers=user_token_headers, json=permission.dict()) + assert response.status_code == status.HTTP_404_NOT_FOUND + + @pytest.mark.asyncio + async def test_create_bucket_permissions_for_owner( + self, client: AsyncClient, user_token_headers: dict[str, str], random_user: User, random_bucket: Bucket + ) -> None: + """ + Test for creating a bucket permission for the owner of the bucket. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + random_user : app.models.user.User + Random user for testing who is owner of the bucket. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. 
+ """ + permission = BucketPermissionSchema(bucket_name=random_bucket.name, uid=random_user.uid) + response = await client.post(self.base_path, headers=user_token_headers, json=permission.dict()) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + @pytest.mark.asyncio + async def test_create_duplicate_bucket_permissions( + self, + client: AsyncClient, + user_token_headers: dict[str, str], + random_bucket_permission_schema: BucketPermissionSchema, + ) -> None: + """ + Test for creating a duplicated bucket permission. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + """ + permission = BucketPermissionSchema( + bucket_name=random_bucket_permission_schema.bucket_name, uid=random_bucket_permission_schema.uid + ) + response = await client.post(self.base_path, headers=user_token_headers, json=permission.dict()) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + @pytest.mark.asyncio + async def test_create_valid_bucket_permissions( + self, client: AsyncClient, user_token_headers: dict[str, str], random_second_user: User, random_bucket: Bucket + ) -> None: + """ + Test for creating a valid bucket permission. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. 
+ """ + permission = BucketPermissionSchema(bucket_name=random_bucket.name, uid=random_second_user.uid) + + response = await client.post(self.base_path, headers=user_token_headers, json=permission.dict()) + assert response.status_code == status.HTTP_201_CREATED + created_permission = response.json() + assert created_permission["uid"] == random_second_user.uid + assert created_permission["bucket_name"] == random_bucket.name + + +class TestBucketPermissionRoutesDelete(_TestBucketPermissionRoutes): + @pytest.mark.asyncio + async def test_delete_bucket_permission_from_owner( + self, + client: AsyncClient, + user_token_headers: dict[str, str], + random_bucket_permission_schema: BucketPermissionSchema, + ) -> None: + """ + Test for deleting a valid bucket permission as the owner of the bucket. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + """ + response = await client.delete( + f"{self.base_path}bucket/{random_bucket_permission_schema.bucket_name}/user/{random_bucket_permission_schema.uid}", # noqa:E501 + headers=user_token_headers, + ) + assert response.status_code == status.HTTP_204_NO_CONTENT + + @pytest.mark.asyncio + async def test_delete_foreign_bucket_permission_with_permission( + self, client: AsyncClient, random_second_user: User, random_bucket_permission_schema: BucketPermissionSchema + ) -> None: + """ + Test for deleting a bucket permission as a grantee. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. 
+ random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + """ + user_token_headers = get_authorization_headers(random_second_user.uid) + response = await client.get( + f"{self.base_path}bucket/{random_bucket_permission_schema.bucket_name}/user/{random_bucket_permission_schema.uid}", # noqa:E501 + headers=user_token_headers, + ) + assert response.status_code == status.HTTP_200_OK + + @pytest.mark.asyncio + async def test_delete_bucket_permission_with_unknown_user( + self, + client: AsyncClient, + user_token_headers: dict[str, str], + random_bucket_permission_schema: BucketPermissionSchema, + ) -> None: + """ + Test for deleting a bucket permission as the grantee of the permission. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + """ + response = await client.delete( + f"{self.base_path}bucket/{random_bucket_permission_schema.bucket_name}/user/ImpossibleUser", + headers=user_token_headers, + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + @pytest.mark.asyncio + async def test_delete_bucket_permission_without_permission( + self, client: AsyncClient, user_token_headers: dict[str, str], random_bucket: Bucket, random_second_user: User + ) -> None: + """ + Test for deleting a bucket permission with an unknown bucket/user combination. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. 
+ """ + response = await client.delete( + f"{self.base_path}bucket/{random_bucket.name}/user/{random_second_user.uid}", + headers=user_token_headers, + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + @pytest.mark.asyncio + async def test_delete_wrong_bucket_permission_with_permission( + self, + db: AsyncSession, + client: AsyncClient, + random_bucket_permission_schema: BucketPermissionSchema, + random_third_user: User, + ) -> None: + """ + Test for deleting a bucket permission as a grantee for another grantee for the bucket. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + random_third_user : app.models.user.User + Random third user who has no permissions for the bucket. pytest fixture. + """ + await add_permission_for_bucket(db, random_bucket_permission_schema.bucket_name, random_third_user.uid) + user_token_headers = get_authorization_headers(random_third_user.uid) + response = await client.delete( + f"{self.base_path}bucket/{random_bucket_permission_schema.bucket_name}/user/{random_bucket_permission_schema.uid}", # noqa:E501 + headers=user_token_headers, + ) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +class TestBucketPermissionRoutesUpdate(_TestBucketPermissionRoutes): + @pytest.mark.asyncio + async def test_update_valid_bucket_permission( + self, + client: AsyncClient, + user_token_headers: dict[str, str], + random_bucket_permission_schema: BucketPermissionSchema, + ) -> None: + """ + Test for updating a bucket permission as the owner of the bucket. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. 
+ random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + """ + new_from_time = datetime(2022, 1, 1, 0, 0) + new_params = BucketPermissionParametersSchema( + from_timestamp=new_from_time, + to_timestamp=new_from_time + timedelta(days=1), + permission=PermissionEnum.READWRITE, + file_prefix="pseudo/folder/", + ) + response = await client.put( + f"{self.base_path}bucket/{random_bucket_permission_schema.bucket_name}/user/{random_bucket_permission_schema.uid}", # noqa:E501 + headers=user_token_headers, + content=json.dumps(new_params.dict(), default=json_datetime_converter), + ) + assert response.status_code == status.HTTP_200_OK + updated_permission = response.json() + assert updated_permission["uid"] == random_bucket_permission_schema.uid + assert updated_permission["bucket_name"] == random_bucket_permission_schema.bucket_name + if new_params.from_timestamp is not None and new_params.to_timestamp is not None: + assert updated_permission["from_timestamp"] == new_params.from_timestamp.strftime("%Y-%m-%dT%H:%M:%S") + assert updated_permission["to_timestamp"] == new_params.to_timestamp.strftime("%Y-%m-%dT%H:%M:%S") + assert updated_permission["permission"] == new_params.permission + assert updated_permission["file_prefix"] == new_params.file_prefix + + @pytest.mark.asyncio + async def test_update_unknown_bucket_permission( + self, + client: AsyncClient, + user_token_headers: dict[str, str], + random_bucket_permission_schema: BucketPermissionSchema, + ) -> None: + """ + Test for updating a bucket permission with an unknown user. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. 
+ """ + new_params = BucketPermissionParametersSchema( + permission=PermissionEnum.READWRITE, + file_prefix="pseudo/folder/", + ) + response = await client.put( + f"{self.base_path}bucket/{random_bucket_permission_schema.bucket_name}/user/impossibleUser", + headers=user_token_headers, + json=new_params.dict(), + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + @pytest.mark.asyncio + async def test_update_bucket_permission_without_permission( + self, client: AsyncClient, user_token_headers: dict[str, str], random_bucket: Bucket, random_second_user: User + ) -> None: + """ + Test for updating a non-existing bucket permission with a valid user. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. + """ + new_params = BucketPermissionParametersSchema( + permission=PermissionEnum.READWRITE, + file_prefix="pseudo/folder/", + ) + response = await client.put( + f"{self.base_path}bucket/{random_bucket.name}/user/{random_second_user.uid}", + headers=user_token_headers, + json=new_params.dict(), + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + @pytest.mark.asyncio + async def test_update_foreign_bucket_permission_with_permission( + self, client: AsyncClient, random_bucket_permission_schema: BucketPermissionSchema, random_second_user: User + ) -> None: + """ + Test for updating a bucket permission as the grantee of the permission. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. 
+ """ + user_token_headers = get_authorization_headers(random_second_user.uid) + new_params = BucketPermissionParametersSchema( + permission=PermissionEnum.READWRITE, + file_prefix="pseudo/folder/", + ) + response = await client.put( + f"{self.base_path}bucket/{random_bucket_permission_schema.bucket_name}/user/{random_second_user.uid}", + headers=user_token_headers, + json=new_params.dict(), + ) + assert response.status_code == status.HTTP_403_FORBIDDEN + + @pytest.mark.asyncio + async def test_update_foreign_bucket_permission_without_permission( + self, client: AsyncClient, random_bucket_permission_schema: BucketPermissionSchema, random_third_user: User + ) -> None: + """ + Test for updating a bucket permission as an unrelated third user. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + random_third_user : app.models.user.User + Random second user for testing. pytest fixture. 
+ """ + user_token_headers = get_authorization_headers(random_third_user.uid) + new_params = BucketPermissionParametersSchema( + permission=PermissionEnum.READWRITE, + file_prefix="pseudo/folder/", + ) + response = await client.put( + f"{self.base_path}bucket/{random_bucket_permission_schema.bucket_name}/user/{random_bucket_permission_schema.uid}", # noqa:E501 + headers=user_token_headers, + json=new_params.dict(), + ) + assert response.status_code == status.HTTP_403_FORBIDDEN diff --git a/app/tests/api/test_buckets.py b/app/tests/api/test_buckets.py new file mode 100644 index 0000000000000000000000000000000000000000..887aec5bef667e14bd6c6ed2915f2896035bd091 --- /dev/null +++ b/app/tests/api/test_buckets.py @@ -0,0 +1,295 @@ +import pytest +from fastapi import status +from httpx import AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession + +from app.crud.crud_bucket import CRUDBucket +from app.models.bucket import Bucket +from app.models.bucket_permission import PermissionEnum +from app.models.user import User +from app.schemas.bucket import BucketIn +from app.tests.mocks.mock_s3_resource import MockS3ServiceResource +from app.tests.utils.bucket import add_permission_for_bucket +from app.tests.utils.user import get_authorization_headers +from app.tests.utils.utils import random_lower_string + + +class _TestBucketRoutes: + base_path = "/buckets/" + + +class TestBucketRoutesGet(_TestBucketRoutes): + @pytest.mark.asyncio + async def test_get_own_buckets( + self, + client: AsyncClient, + random_bucket: Bucket, + user_token_headers: dict[str, str], + ) -> None: + """ + Test for getting the buckets where the user is the owner. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. 
+ """ + response = await client.get(f"{self.base_path}", headers=user_token_headers) + assert response.status_code == status.HTTP_200_OK + + buckets = response.json() + + assert buckets + assert len(buckets) == 1 + bucket = buckets[0] + + assert bucket["name"] == random_bucket.name + assert bucket["owner"] == random_bucket.owner.uid + + @pytest.mark.asyncio + async def test_get_bucket_by_name( + self, + client: AsyncClient, + random_bucket: Bucket, + user_token_headers: dict[str, str], + ) -> None: + """ + Test for getting a bucket by its name where the user is the owner of the bucket. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + """ + response = await client.get(f"{self.base_path}{random_bucket.name}", headers=user_token_headers) + assert response.status_code == status.HTTP_200_OK + + bucket = response.json() + + assert bucket["name"] == random_bucket.name + assert bucket["owner"] == random_bucket.owner.uid + + @pytest.mark.asyncio + async def test_get_unknown_bucket(self, client: AsyncClient, user_token_headers: dict[str, str]) -> None: + """ + Test for getting an unknown bucket by its name. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + """ + response = await client.get(f"{self.base_path}impossible_bucket_name", headers=user_token_headers) + assert response.status_code == status.HTTP_404_NOT_FOUND + + @pytest.mark.asyncio + async def test_get_foreign_bucket( + self, client: AsyncClient, random_bucket: Bucket, random_second_user: User + ) -> None: + """ + Test for getting a foreign bucket with permission by its name. 
+ + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + random_second_user : app.models.user.User + Random user which is not the owner of the bucket. pytest fixture. + """ + user_token_headers = get_authorization_headers(random_second_user.uid) + response = await client.get(f"{self.base_path}{random_bucket.name}", headers=user_token_headers) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +class TestBucketRoutesCreate(_TestBucketRoutes): + @pytest.mark.asyncio + async def test_create_bucket( + self, + db: AsyncSession, + client: AsyncClient, + random_user: User, + user_token_headers: dict[str, str], + ) -> None: + """ + Test for creating a bucket. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_user : app.models.user.User + Random user for testing. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. 
+ """ + bucket_info = BucketIn(name=random_lower_string(), description=random_lower_string(127)) + response = await client.post(f"{self.base_path}", headers=user_token_headers, json=bucket_info.dict()) + + assert response.status_code == status.HTTP_201_CREATED + bucket = response.json() + assert bucket + assert bucket["name"] == bucket_info.name + assert bucket["owner"] == random_user.uid + + dbBucket = await CRUDBucket.get(db, bucket_info.name) + assert dbBucket + assert dbBucket.name == bucket_info.name + assert dbBucket.owner_id == random_user.uid + + await CRUDBucket.delete(db, dbBucket) + + @pytest.mark.asyncio + async def test_create_duplicated_bucket( + self, + client: AsyncClient, + random_bucket: Bucket, + user_token_headers: dict[str, str], + ) -> None: + """ + Test for creating a bucket where the name is already taken. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + """ + bucket_info = BucketIn(name=random_bucket.name, description=random_lower_string(127)) + response = await client.post(f"{self.base_path}", headers=user_token_headers, json=bucket_info.dict()) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +class TestBucketRoutesDelete(_TestBucketRoutes): + @pytest.mark.asyncio + async def test_delete_empty_bucket( + self, + client: AsyncClient, + random_bucket: Bucket, + user_token_headers: dict[str, str], + ) -> None: + """ + Test for deleting an empty bucket. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. 
+ """ + response = await client.delete( + f"{self.base_path}{random_bucket.name}", headers=user_token_headers, params={"force_delete": False} + ) + + assert response.status_code == status.HTTP_204_NO_CONTENT + + @pytest.mark.asyncio + async def test_delete_foreign_bucket_with_permission( + self, client: AsyncClient, db: AsyncSession, random_user: User, random_second_user: User + ) -> None: + """ + Test for deleting a foreign bucket. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_user : app.models.user.User + Random user for testing. pytest fixture. + random_second_user : app.models.user.User + Random user which is not the owner of the bucket. pytest fixture. + """ + bucket = Bucket( + name=random_lower_string(), + description=random_lower_string(127), + owner_id=random_second_user.uid, + ) + db.add(bucket) + await db.commit() + await add_permission_for_bucket(db, bucket.name, random_user.uid, permission=PermissionEnum.READWRITE) + + user_token_headers = get_authorization_headers(random_user.uid) + response = await client.delete( + f"{self.base_path}{bucket.name}", headers=user_token_headers, params={"force_delete": False} + ) + + assert response.status_code == status.HTTP_403_FORBIDDEN + + await db.delete(bucket) + + @pytest.mark.asyncio + async def test_delete_non_empty_bucket( + self, + client: AsyncClient, + random_bucket: Bucket, + user_token_headers: dict[str, str], + mock_s3_service: MockS3ServiceResource, + ) -> None: + """ + Test for deleting a non-empty bucket. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + mock_s3_service : app.tests.mocks.mock_s3_resource.MockS3ServiceResource + Mock S3 Service to manipulate objects. pytest fixture. 
+ """ + mock_s3_service.create_object_in_bucket(bucket_name=random_bucket.name, key=random_lower_string()) + response = await client.delete( + f"{self.base_path}{random_bucket.name}", headers=user_token_headers, params={"force_delete": False} + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + + @pytest.mark.asyncio + async def test_force_delete_non_empty_bucket( + self, + client: AsyncClient, + random_bucket: Bucket, + user_token_headers: dict[str, str], + mock_s3_service: MockS3ServiceResource, + ) -> None: + """ + Test for force deleting a non-empty bucket. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + mock_s3_service : app.tests.mocks.mock_s3_resource.MockS3ServiceResource + Mock S3 Service to manipulate objects. pytest fixture. 
+ """ + mock_s3_service.create_object_in_bucket(bucket_name=random_bucket.name, key=random_lower_string()) + response = await client.delete( + f"{self.base_path}{random_bucket.name}", headers=user_token_headers, params={"force_delete": True} + ) + assert response.status_code == status.HTTP_204_NO_CONTENT diff --git a/app/tests/api/test_login.py b/app/tests/api/test_login.py new file mode 100644 index 0000000000000000000000000000000000000000..6b7532a759d85b3972a3475e6288135747d45eb7 --- /dev/null +++ b/app/tests/api/test_login.py @@ -0,0 +1,116 @@ +from urllib.parse import parse_qs, urlparse + +import pytest +from fastapi import status +from httpx import AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.core.config import settings +from app.core.security import decode_token +from app.models.user import User +from app.tests.mocks.mock_rgw_admin import MockRGWAdmin +from app.tests.utils.utils import random_lower_string + + +class TestLoginRoute: + login_path: str = "/auth/" + + @pytest.mark.asyncio + async def test_login_redirect(self, client: AsyncClient) -> None: + """ + Test for the query parameter on the login redirect route. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + """ + r = await client.get(self.login_path + "login", follow_redirects=False) + query = parse_qs(urlparse(r.headers["location"], scheme="http").query) + assert r.status_code == status.HTTP_302_FOUND + assert "code" in query["response_type"] + assert settings.OIDC_CLIENT_ID in query["client_id"] + assert "openid" in query["scope"][0].split(" ") + assert query["state"] + + @pytest.mark.asyncio + async def test_successful_login_with_existing_user(self, client: AsyncClient, random_user: User) -> None: + """ + Test for login callback route with an existing user. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. 
+ random_user : app.models.user.User + Random user for testing. pytest fixture. + """ + r = await client.get( + self.login_path + "callback", + params={ + "voperson_id": random_user.uid, + "name": random_user.display_name, + }, + follow_redirects=False, + ) + assert r.status_code == status.HTTP_302_FOUND + assert "set-cookie" in r.headers.keys() + cookie_header = r.headers["set-cookie"] + right_header = None + for t in cookie_header.split(";"): + if t.startswith("bearer"): + right_header = t + break + assert right_header + claim = decode_token(right_header.split("=")[1]) + assert claim["sub"] == random_user.uid + + @pytest.mark.asyncio + async def test_successful_login_with_non_existing_user( + self, client: AsyncClient, mock_rgw_admin: MockRGWAdmin, db: AsyncSession + ) -> None: + """ + Test for login callback route with a non-existing user. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + mock_rgw_admin : app.tests.mocks.mock_rgw_admin.MockRGWAdmin + Mock RGW admin for Ceph. pytest fixture. + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. 
+ """ + uid = random_lower_string() + display_name = f"{random_lower_string(8)} {random_lower_string(8)}" + r = await client.get( + self.login_path + "callback", + params={"voperson_id": uid, "name": display_name}, + follow_redirects=False, + ) + # Check response and valid/right jwt token + assert r.status_code == status.HTTP_302_FOUND + assert "set-cookie" in r.headers.keys() + cookie_header = r.headers["set-cookie"] + right_header = None + for t in cookie_header.split(";"): + if t.startswith("bearer"): + right_header = t + break + assert right_header + claim = decode_token(right_header.split("=")[1]) + assert claim["sub"] == uid + + # Check that user is created in RGW + assert mock_rgw_admin.get_user(uid)["keys"][0]["user"] == uid + + # Check that user is created in DB + db_user = (await db.execute(select(User).where(User.uid == uid))).scalar() + assert db_user + assert db_user.uid == uid + + # Cleanup + await db.delete(db_user) + await db.commit() + mock_rgw_admin.delete_user(uid) diff --git a/app/tests/api/test_s3_keys.py b/app/tests/api/test_s3_keys.py new file mode 100644 index 0000000000000000000000000000000000000000..538063a1c22c11ecf7022146122475468cb908d9 --- /dev/null +++ b/app/tests/api/test_s3_keys.py @@ -0,0 +1,170 @@ +import pytest +from fastapi import status +from httpx import AsyncClient + +from app.models.user import User +from app.tests.mocks.mock_rgw_admin import MockRGWAdmin +from app.tests.utils.user import get_authorization_headers + + +class _TestS3KeyRoutes: + base_path = "/users/" + + +class TestS3KeyRoutesGet(_TestS3KeyRoutes): + @pytest.mark.asyncio + async def test_get_s3_keys_for_foreign_user( + self, + client: AsyncClient, + user_token_headers: dict[str, str], + random_second_user: User, + ) -> None: + """ + Test for getting the S3 keys from a foreign user. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. 
+ user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + random_second_user : app.models.user.User + Random foreign user for testing. pytest fixture. + """ + response = await client.get(f"{self.base_path}{random_second_user.uid}/keys", headers=user_token_headers) + + assert response.status_code == status.HTTP_403_FORBIDDEN + + @pytest.mark.asyncio + async def test_get_s3_keys_for_user(self, client: AsyncClient, random_user: User) -> None: + """ + Test for getting the S3 keys from a user. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_user : app.models.user.User + Random user for testing. pytest fixture. + """ + headers = get_authorization_headers(random_user.uid) + response = await client.get(f"{self.base_path}{random_user.uid}/keys", headers=headers) + keys = response.json() + assert response.status_code == status.HTTP_200_OK + assert isinstance(keys, list) + assert len(keys) == 1 + assert keys[0]["user"] == random_user.uid + + @pytest.mark.asyncio + async def test_get_specific_s3_key_for_user( + self, client: AsyncClient, random_user: User, mock_rgw_admin: MockRGWAdmin + ) -> None: + """ + Test for getting a specific S3 key from a user. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_user : app.models.user.User + Random user for testing. pytest fixture. + mock_rgw_admin : app.tests.mocks.mock_rgw_admin.MockRGWAdmin + Mock class for rgwadmin package. pytest fixture. 
+ """ + headers = get_authorization_headers(random_user.uid) + s3_key = mock_rgw_admin.get_user(uid=random_user.uid)["keys"][0] + response = await client.get(f"{self.base_path}{random_user.uid}/keys/{s3_key['access_key']}", headers=headers) + response_key = response.json() + assert response.status_code == status.HTTP_200_OK + assert response_key["access_key"] == s3_key["access_key"] + assert response_key["secret_key"] == s3_key["secret_key"] + assert response_key["user"] == s3_key["user"] + + @pytest.mark.asyncio + async def test_get_unknown_s3_key_for_user(self, client: AsyncClient, random_user: User) -> None: + """ + Test for getting an unknown S3 keys from a user. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_user : app.models.user.User + Random user for testing. pytest fixture. + """ + headers = get_authorization_headers(random_user.uid) + response = await client.get(f"{self.base_path}{random_user.uid}/keys/impossible_key", headers=headers) + assert response.status_code == status.HTTP_404_NOT_FOUND + + +class TestS3KeyRoutesCreate(_TestS3KeyRoutes): + @pytest.mark.asyncio + async def test_create_s3_key_for_user( + self, client: AsyncClient, random_user: User, mock_rgw_admin: MockRGWAdmin + ) -> None: + """ + Test for getting a specific S3 key from a user. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_user : app.models.user.User + Random user for testing. pytest fixture. + mock_rgw_admin : app.tests.mocks.mock_rgw_admin.MockRGWAdmin + Mock class for rgwadmin package. pytest fixture. 
+ """ + headers = get_authorization_headers(random_user.uid) + old_s3_key = mock_rgw_admin.get_user(uid=random_user.uid)["keys"][0] + response = await client.post(f"{self.base_path}{random_user.uid}/keys", headers=headers) + new_key = response.json() + + assert response.status_code == status.HTTP_201_CREATED + assert new_key["access_key"] != old_s3_key["access_key"] + assert new_key["user"] == random_user.uid + + mock_rgw_admin.remove_key(uid=random_user.uid, access_key=new_key["access_key"]) + + +class TestS3KeyRoutesDelete(_TestS3KeyRoutes): + @pytest.mark.asyncio + async def test_delete_s3_key_for_user( + self, client: AsyncClient, random_user: User, mock_rgw_admin: MockRGWAdmin + ) -> None: + """ + Test for deleting a specific S3 key from a user. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_user : app.models.user.User + Random user for testing. pytest fixture. + mock_rgw_admin : app.tests.mocks.mock_rgw_admin.MockRGWAdmin + Mock class for rgwadmin package. pytest fixture. + """ + headers = get_authorization_headers(random_user.uid) + new_s3_key = mock_rgw_admin.create_key(uid=random_user.uid)[-1] + assert len(mock_rgw_admin.get_user(uid=random_user.uid)["keys"]) == 2 + response = await client.delete( + f"{self.base_path}{random_user.uid}/keys/{new_s3_key['access_key']}", headers=headers + ) + + assert response.status_code == status.HTTP_204_NO_CONTENT + assert len(mock_rgw_admin.get_user(uid=random_user.uid)["keys"]) == 1 + + @pytest.mark.asyncio + async def test_delete_unknown_s3_key_for_user(self, client: AsyncClient, random_user: User) -> None: + """ + Test for deleting an unknown S3 key from a user. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_user : app.models.user.User + Random user for testing. pytest fixture. 
+ """ + headers = get_authorization_headers(random_user.uid) + response = await client.delete(f"{self.base_path}{random_user.uid}/keys/impossible", headers=headers) + assert response.status_code == status.HTTP_404_NOT_FOUND diff --git a/app/tests/api/test_s3_objects.py b/app/tests/api/test_s3_objects.py new file mode 100644 index 0000000000000000000000000000000000000000..f7b2ee3cfe4bc42bdff47f261fd769776eb19d0a --- /dev/null +++ b/app/tests/api/test_s3_objects.py @@ -0,0 +1,196 @@ +import pytest +from fastapi import status +from httpx import AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession + +from app.models.bucket import Bucket +from app.models.bucket_permission import BucketPermission, PermissionEnum +from app.models.user import User +from app.tests.mocks.mock_s3_resource import MockS3ServiceResource +from app.tests.utils.user import get_authorization_headers +from app.tests.utils.utils import random_lower_string + + +class _TestS3ObjectsRoutes: + base_path = "/buckets/" + + +class TestS3ObjectsRoutesGet(_TestS3ObjectsRoutes): + @pytest.mark.asyncio + async def test_get_objects_with_right_for_specific_prefix( + self, + db: AsyncSession, + client: AsyncClient, + random_bucket: Bucket, + random_second_user: User, + mock_s3_service: MockS3ServiceResource, + ) -> None: + """ + Test for getting the list of S3 objects in a bucket while only having rights for a specific prefix. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. + mock_s3_service : app.tests.mocks.mock_s3_resource.MockS3ServiceResource + Mock S3 Service to manipulate objects. pytest fixture. 
+ """ + user_token_headers = get_authorization_headers(random_second_user.uid) + mock_s3_service.create_object_in_bucket(bucket_name=random_bucket.name, key=random_lower_string()) + obj = mock_s3_service.create_object_in_bucket( + bucket_name=random_bucket.name, key="pseudo/folder/" + random_lower_string() + ) + permission = BucketPermission( + bucket_name=random_bucket.name, + user_id=random_second_user.uid, + permissions=PermissionEnum.READ, + file_prefix="pseudo/folder/", + ) + db.add(permission) + await db.commit() + response = await client.get(f"{self.base_path}{random_bucket.name}/objects", headers=user_token_headers) + assert response.status_code == status.HTTP_200_OK + response_obj_list = response.json() + assert len(response_obj_list) == 1 + assert response_obj_list[0]["key"] == obj.key + + @pytest.mark.asyncio + async def test_get_object_without_right_for_specific_prefix( + self, + db: AsyncSession, + client: AsyncClient, + random_bucket: Bucket, + random_second_user: User, + mock_s3_service: MockS3ServiceResource, + ) -> None: + """ + Test for getting a specific S3 object in a bucket while not having rights for the prefix. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. + mock_s3_service : app.tests.mocks.mock_s3_resource.MockS3ServiceResource + Mock S3 Service to manipulate objects. pytest fixture. 
+ """ + user_token_headers = get_authorization_headers(random_second_user.uid) + obj = mock_s3_service.create_object_in_bucket( + bucket_name=random_bucket.name, key="another/folder/" + random_lower_string() + ) + permission = BucketPermission( + bucket_name=random_bucket.name, + user_id=random_second_user.uid, + permissions=PermissionEnum.READ, + file_prefix="pseudo/folder/", + ) + db.add(permission) + await db.commit() + response = await client.get( + f"{self.base_path}{random_bucket.name}/objects/{obj.key}", headers=user_token_headers + ) + assert response.status_code == status.HTTP_403_FORBIDDEN + + @pytest.mark.asyncio + async def test_get_all_s3_object_from_bucket( + self, + client: AsyncClient, + random_bucket: Bucket, + user_token_headers: dict[str, str], + mock_s3_service: MockS3ServiceResource, + ) -> None: + """ + Test for getting the list of S3 objects in a bucket. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + mock_s3_service : app.tests.mocks.mock_s3_resource.MockS3ServiceResource + Mock S3 Service to manipulate objects. pytest fixture. 
+ """ + # Create MockS3ObjectSummary in mock service + obj = mock_s3_service.create_object_in_bucket(bucket_name=random_bucket.name, key=random_lower_string()) + + response = await client.get(f"{self.base_path}{random_bucket.name}/objects", headers=user_token_headers) + response_obj_list = response.json() + + assert response.status_code == status.HTTP_200_OK + assert isinstance(response_obj_list, list) + assert len(response_obj_list) > 0 + assert len(response_obj_list) == len(mock_s3_service.Bucket(name=random_bucket.name).get_objects()) + + response_obj = response_obj_list[0] + assert response_obj + assert response_obj["key"] == obj.key + assert response_obj["bucket"] == obj.bucket_name + + @pytest.mark.asyncio + async def test_get_unknown_s3_object( + self, client: AsyncClient, random_bucket: Bucket, user_token_headers: dict[str, str] + ) -> None: + """ + Test for getting an unknown object from a bucket. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + """ + response = await client.get( + f"{self.base_path}{random_bucket.name}/objects/impossible.pdf", headers=user_token_headers + ) + + assert response.status_code == status.HTTP_404_NOT_FOUND + + @pytest.mark.asyncio + async def test_get_s3_object( + self, + client: AsyncClient, + random_bucket: Bucket, + user_token_headers: dict[str, str], + mock_s3_service: MockS3ServiceResource, + ) -> None: + """ + Test for getting a specific object from a bucket. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. 
+ mock_s3_service : app.tests.mocks.mock_s3_resource.MockS3ServiceResource + Mock S3 Service to manipulate objects. pytest fixture. + """ + # Create MockS3ObjectSummary in mock service + obj = mock_s3_service.create_object_in_bucket(bucket_name=random_bucket.name, key=random_lower_string()) + response = await client.get( + f"{self.base_path}{random_bucket.name}/objects/{obj.key}", headers=user_token_headers + ) + response_obj = response.json() + assert response.status_code == status.HTTP_200_OK + assert response_obj + assert response_obj["key"] == obj.key + assert response_obj["bucket"] == obj.bucket_name diff --git a/app/tests/api/test_security.py b/app/tests/api/test_security.py new file mode 100644 index 0000000000000000000000000000000000000000..72115b29de09253932491eca10b5b80e71fd9d9f --- /dev/null +++ b/app/tests/api/test_security.py @@ -0,0 +1,85 @@ +import pytest +from fastapi import status +from httpx import AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession + +from app.models.user import User + + +class TestJWTProtectedRoutes: + protected_route: str = "/users/me" + + @pytest.mark.asyncio + async def test_missing_authorization_header(self, client: AsyncClient) -> None: + """ + Test with missing authorization header on a protected route. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + """ + response = await client.get(self.protected_route) + error = response.json() + assert error + assert response.status_code == status.HTTP_403_FORBIDDEN + assert error["detail"] == "Not authenticated" + + @pytest.mark.asyncio + async def test_malformed_authorization_header(self, client: AsyncClient) -> None: + """ + Test with malformed authorization header on a protected route. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. 
+ """ + response = await client.get(self.protected_route, headers={"Authorization": "Bearer not-a-jwt-token"}) + error = response.json() + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert error + assert error["detail"] == "Malformed JWT" + + @pytest.mark.asyncio + async def test_correct_authorization_header(self, client: AsyncClient, user_token_headers: dict[str, str]) -> None: + """ + Test with correct authorization header on a protected route. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + """ + response = await client.get(self.protected_route, headers=user_token_headers) + assert response.status_code == status.HTTP_200_OK + + @pytest.mark.asyncio + async def test_protected_route_with_deleted_user( + self, + db: AsyncSession, + client: AsyncClient, + random_user: User, + user_token_headers: dict[str, str], + ) -> None: + """ + Test with correct authorization header from a deleted user on a protected route. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_user : app.models.user.User + Random user for testing. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. 
+ """ + await db.delete(random_user) + await db.commit() + + response = await client.get(self.protected_route, headers=user_token_headers) + assert response.status_code == status.HTTP_404_NOT_FOUND diff --git a/app/tests/api/test_users.py b/app/tests/api/test_users.py new file mode 100644 index 0000000000000000000000000000000000000000..8611742a8e1bf20bd4f43931e5a585e7aebb9e0c --- /dev/null +++ b/app/tests/api/test_users.py @@ -0,0 +1,90 @@ +import pytest +from fastapi import status +from httpx import AsyncClient + +from app.models.user import User +from app.tests.utils.user import get_authorization_headers + + +class _TestUserRoutes: + base_path = "/users/" + + +class TestUserRoutesGet(_TestUserRoutes): + @pytest.mark.asyncio + async def test_get_user_me(self, client: AsyncClient, random_user: User) -> None: + """ + Test for getting the currently logged-in user. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_user : app.models.user.User + Random user for testing. pytest fixture. + """ + headers = get_authorization_headers(random_user.uid) + response = await client.get(f"{self.base_path}me", headers=headers) + current_user = response.json() + assert response.status_code == status.HTTP_200_OK + assert current_user + assert current_user["uid"] == random_user.uid + assert current_user["display_name"] == random_user.display_name + + @pytest.mark.asyncio + async def test_get_unknown_user(self, client: AsyncClient, user_token_headers: dict[str, str]) -> None: + """ + Test for getting an unknown user by its uid. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. 
+ """ + response = await client.get(f"{self.base_path}impossible_uid", headers=user_token_headers) + assert response.status_code == status.HTTP_404_NOT_FOUND + + @pytest.mark.asyncio + async def test_get_user_by_uid(self, client: AsyncClient, random_user: User) -> None: + """ + Test for getting a known user by its uid. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + random_user : app.models.user.User + Random user for testing. pytest fixture. + """ + headers = get_authorization_headers(random_user.uid) + response = await client.get(f"{self.base_path}{random_user.uid}", headers=headers) + current_user = response.json() + assert response.status_code == status.HTTP_200_OK + assert current_user + assert current_user["uid"] == random_user.uid + assert current_user["display_name"] == random_user.display_name + + @pytest.mark.asyncio + async def test_get_foreign_user_by_uid( + self, + client: AsyncClient, + user_token_headers: dict[str, str], + random_second_user: User, + ) -> None: + """ + Test for getting a foreign user by its uid. + + Parameters + ---------- + client : httpx.AsyncClient + HTTP Client to perform the request on. pytest fixture. + user_token_headers : dict[str,str] + HTTP Headers to authorize the request. pytest fixture. + random_second_user : app.models.user.User + Random foreign user for testing. pytest fixture. 
+ """ + response = await client.get(f"{self.base_path}{random_second_user.uid}", headers=user_token_headers) + + assert response.status_code == status.HTTP_403_FORBIDDEN diff --git a/app/tests/conftest.py b/app/tests/conftest.py new file mode 100644 index 0000000000000000000000000000000000000000..e975fd46d243ba744ba5c9890188f32104036faf --- /dev/null +++ b/app/tests/conftest.py @@ -0,0 +1,216 @@ +import asyncio +import json +from typing import AsyncGenerator, Generator + +import pytest +import pytest_asyncio +from httpx import AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession + +from app.api.dependencies import get_rgw_admin, get_s3_resource, get_userinfo_from_access_token +from app.db.session import SessionAsync as Session +from app.main import app +from app.models.bucket import Bucket +from app.models.bucket_permission import BucketPermission as BucketPermissionDB +from app.models.user import User +from app.schemas.bucket_permission import BucketPermission as BucketPermissionSchema +from app.tests.mocks.mock_rgw_admin import MockRGWAdmin +from app.tests.mocks.mock_s3_resource import MockS3ServiceResource +from app.tests.utils.bucket import create_random_bucket +from app.tests.utils.user import create_random_user, get_authorization_headers + + +@pytest.fixture(scope="session") +def event_loop() -> Generator: + """ + Creates an instance of the default event loop for the test session. + """ + loop = asyncio.new_event_loop() + yield loop + loop.close() + + +@pytest.fixture(scope="session") +def mock_rgw_admin() -> MockRGWAdmin: + """ + Fixture for creating a mock object for the rgwadmin package. + """ + return MockRGWAdmin() + + +@pytest.fixture(scope="session") +def mock_s3_service() -> MockS3ServiceResource: + """ + Fixture for creating a mock object for the rgwadmin package. 
+ """ + return MockS3ServiceResource() + + +@pytest_asyncio.fixture(scope="module") +async def client(mock_rgw_admin: MockRGWAdmin, mock_s3_service: MockS3ServiceResource) -> AsyncGenerator: + """ + Fixture for creating a TestClient and perform HTTP Request on it. + Overrides the dependency for the RGW admin operations. + """ + + def get_mock_rgw() -> MockRGWAdmin: + return mock_rgw_admin + + def get_mock_s3() -> MockS3ServiceResource: + return mock_s3_service + + def get_mock_userinfo(voperson_id: str, name: str) -> dict[str, str]: + return {"voperson_id": voperson_id + "@lifescience-ri.eu", "name": name} + + app.dependency_overrides[get_rgw_admin] = get_mock_rgw + app.dependency_overrides[get_s3_resource] = get_mock_s3 + app.dependency_overrides[get_userinfo_from_access_token] = get_mock_userinfo + async with AsyncClient(app=app, base_url="http://localhost:8000") as ac: + yield ac + app.dependency_overrides = {} + + +@pytest_asyncio.fixture(scope="module") +async def user_token_headers(random_user: User) -> dict[str, str]: + """ + Create valid authorization header with a successful login. + """ + return get_authorization_headers(uid=random_user.uid) + + +@pytest_asyncio.fixture(scope="module") +async def db() -> AsyncGenerator: + """ + Fixture for creating a database session to connect to. + """ + async with Session() as dbSession: + yield dbSession + + +@pytest_asyncio.fixture(scope="module") +async def random_user(db: AsyncSession, mock_rgw_admin: MockRGWAdmin) -> AsyncGenerator: + """ + Create a random user and deletes him afterwards. + """ + user = await create_random_user(db) + mock_rgw_admin.create_key(uid=user.uid) + yield user + mock_rgw_admin.delete_user(uid=user.uid) + await db.delete(user) + await db.commit() + + +@pytest_asyncio.fixture(scope="module") +async def random_second_user(db: AsyncSession, mock_rgw_admin: MockRGWAdmin) -> AsyncGenerator: + """ + Create a random second user and deletes him afterwards. 
+ """ + user = await create_random_user(db) + mock_rgw_admin.create_key(uid=user.uid) + yield user + mock_rgw_admin.delete_user(uid=user.uid) + await db.delete(user) + await db.commit() + + +@pytest_asyncio.fixture(scope="module") +async def random_third_user(db: AsyncSession, mock_rgw_admin: MockRGWAdmin) -> AsyncGenerator: + """ + Create a random third user and deletes him afterwards. + """ + user = await create_random_user(db) + mock_rgw_admin.create_key(uid=user.uid) + yield user + mock_rgw_admin.delete_user(uid=user.uid) + await db.delete(user) + await db.commit() + + +@pytest_asyncio.fixture(scope="function") +async def random_bucket(db: AsyncSession, random_user: User, mock_s3_service: MockS3ServiceResource) -> AsyncGenerator: + """ + Create a random user and deletes him afterwards. + """ + bucket = await create_random_bucket(db, random_user) + mock_s3_service.Bucket(name=bucket.name).create() + mock_s3_service.BucketPolicy(bucket.name).put( + json.dumps( + { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "PseudoOwnerPerm", + "Effect": "Allow", + "Principal": {"AWS": [f"arn:aws:iam:::user/{random_user.uid}"]}, + "Action": ["s3:GetObject", "s3:DeleteObject", "s3:PutObject", "s3:ListBucket"], + "Resource": [f"arn:aws:s3:::{bucket.name}/*", f"arn:aws:s3:::{bucket.name}"], + } + ], + } + ) + ) + yield bucket + mock_s3_service.delete_bucket(name=bucket.name, force_delete=True) + await db.delete(bucket) + await db.commit() + + +@pytest_asyncio.fixture(scope="function") +async def random_bucket_permission( + db: AsyncSession, random_second_user: User, random_bucket: Bucket, mock_s3_service: MockS3ServiceResource +) -> BucketPermissionDB: + """ + Create a bucket READ permission for the second user on a bucket. 
+ """ + permission_db = BucketPermissionDB(user_id=random_second_user.uid, bucket_name=random_bucket.name) + db.add(permission_db) + await db.commit() + await db.refresh(permission_db) + mock_s3_service.Bucket(random_bucket.name).Policy().put( + json.dumps( + { + "Version": "2012-10-17", + "Statement": BucketPermissionSchema.from_db_model(permission_db).map_to_bucket_policy_statement( + random_second_user.uid + ), + } + ) + ) + return permission_db + + +@pytest_asyncio.fixture(scope="function") +async def random_bucket_permission_schema( + random_bucket_permission: BucketPermissionDB, random_second_user: User +) -> BucketPermissionSchema: + """ + Create a bucket READ permission for the second user on a bucket. + """ + + return BucketPermissionSchema.from_db_model(random_bucket_permission, random_second_user.uid) + + +@pytest_asyncio.fixture(autouse=True) +async def multiple_random_users_buckets( + db: AsyncSession, mock_rgw_admin: MockRGWAdmin, mock_s3_service: MockS3ServiceResource +) -> AsyncGenerator: + """ + Create multiple random users and buckets to ensure that the database is not empty. 
+ """ + user1 = await create_random_user(db) + mock_rgw_admin.create_key(uid=user1.uid) + user2 = await create_random_user(db) + mock_rgw_admin.create_key(uid=user2.uid) + bucket1 = await create_random_bucket(db, user1) + bucket2 = await create_random_bucket(db, user2) + mock_s3_service.Bucket(name=bucket1.name).create() + mock_s3_service.Bucket(name=bucket2.name).create() + yield + mock_rgw_admin.delete_user(uid=user1.uid) + mock_rgw_admin.delete_user(uid=user2.uid) + await db.delete(user1) + await db.delete(user2) + await db.delete(bucket1) + await db.delete(bucket2) + mock_s3_service.delete_bucket(name=bucket1.name) + mock_s3_service.delete_bucket(name=bucket2.name) diff --git a/app/tests/crud/__init__.py b/app/tests/crud/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/tests/crud/test_bucket.py b/app/tests/crud/test_bucket.py new file mode 100644 index 0000000000000000000000000000000000000000..cd4467e1a1bbbaacfac062a3ee9bdddb90fde57f --- /dev/null +++ b/app/tests/crud/test_bucket.py @@ -0,0 +1,318 @@ +from datetime import datetime, timedelta + +import pytest +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.crud.crud_bucket import CRUDBucket +from app.models.bucket import Bucket +from app.models.bucket_permission import PermissionEnum +from app.models.user import User +from app.schemas.bucket import BucketIn +from app.tests.utils.bucket import add_permission_for_bucket +from app.tests.utils.utils import random_lower_string + + +class TestBucketCRUDGet: + @pytest.mark.asyncio + async def test_get_bucket_by_name(self, db: AsyncSession, random_bucket: Bucket) -> None: + """ + Test for getting a user by id from the User CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. 
+ random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + """ + bucket = await CRUDBucket.get(db, random_bucket.name) + assert bucket + assert bucket.name == random_bucket.name + assert bucket.public == random_bucket.public + assert bucket.description == random_bucket.description + + @pytest.mark.asyncio + async def test_get_unknown_bucket(self, db: AsyncSession) -> None: + """ + Test for getting a not existing bucket from the CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + """ + bucket = await CRUDBucket.get(db, "unknown Bucket") + assert bucket is None + + @pytest.mark.asyncio + async def test_get_own_buckets(self, db: AsyncSession, random_bucket: Bucket) -> None: + """ + Test for getting only the buckets where a user is the owner from CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + """ + buckets = await CRUDBucket.get_own_buckets(db, random_bucket.owner_id) + + assert len(buckets) == 1 + assert buckets[0].name == random_bucket.name + + @pytest.mark.asyncio + async def test_get_bucket_with_read_permission_and_own( + self, + db: AsyncSession, + random_bucket: Bucket, + random_user: User, + random_second_user: User, + ) -> None: + """ + Test for getting the users own bucket and a foreign bucket with READ permissions from CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + random_user : app.models.user.User + Random user for testing. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. 
+ """ + bucket = Bucket( + name=random_lower_string(), + description=random_lower_string(127), + owner_id=random_second_user.uid, + ) + db.add(bucket) + await db.commit() + await add_permission_for_bucket(db, bucket.name, random_user.uid, permission=PermissionEnum.READ) + + buckets = await CRUDBucket.get_for_user(db, random_user.uid) + + assert len(buckets) == 2 + assert buckets[0].name == random_bucket.name or buckets[1].name == random_bucket.name + assert buckets[0].name == bucket.name or buckets[1].name == bucket.name + + await db.delete(bucket) + + @pytest.mark.asyncio + async def test_get_bucket_with_read_permission( + self, db: AsyncSession, random_bucket: Bucket, random_second_user: User + ) -> None: + """ + Test for getting a foreign bucket with READ permissions from CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. + """ + await add_permission_for_bucket(db, random_bucket.name, random_second_user.uid, permission=PermissionEnum.READ) + + buckets = await CRUDBucket.get_for_user(db, random_second_user.uid) + + assert len(buckets) > 0 + assert buckets[0].name == random_bucket.name + + @pytest.mark.asyncio + async def test_get_bucket_with_readwrite_permission( + self, db: AsyncSession, random_bucket: Bucket, random_second_user: User + ) -> None: + """ + Test for getting a foreign bucket with READWRITE permissions from CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. 
+ """ + await add_permission_for_bucket( + db, random_bucket.name, random_second_user.uid, permission=PermissionEnum.READWRITE + ) + + buckets = await CRUDBucket.get_for_user(db, random_second_user.uid) + + assert len(buckets) > 0 + assert buckets[0].name == random_bucket.name + + @pytest.mark.asyncio + async def test_get_bucket_with_write_permission( + self, db: AsyncSession, random_bucket: Bucket, random_second_user: User + ) -> None: + """ + Test for getting a foreign bucket with WRITE permissions from CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. + """ + await add_permission_for_bucket(db, random_bucket.name, random_second_user.uid, permission=PermissionEnum.WRITE) + + buckets = await CRUDBucket.get_for_user(db, random_second_user.uid) + + assert len(buckets) == 0 + + @pytest.mark.asyncio + async def test_get_bucket_with_valid_time_permission( + self, db: AsyncSession, random_bucket: Bucket, random_second_user: User + ) -> None: + """ + Test for getting a foreign bucket with valid time permission from CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. 
+ """ + await add_permission_for_bucket( + db, + random_bucket.name, + random_second_user.uid, + from_=datetime.now() - timedelta(days=10), + to=datetime.now() + timedelta(days=10), + ) + + buckets = await CRUDBucket.get_for_user(db, random_second_user.uid) + + assert len(buckets) > 0 + assert buckets[0].name == random_bucket.name + + @pytest.mark.asyncio + async def test_get_bucket_with_invalid_from_time_permission( + self, db: AsyncSession, random_bucket: Bucket, random_second_user: User + ) -> None: + """ + Test for getting a foreign bucket with invalid 'from' time permission from CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. + """ + await add_permission_for_bucket( + db, random_bucket.name, random_second_user.uid, from_=datetime.now() + timedelta(days=10) + ) + + buckets = await CRUDBucket.get_for_user(db, random_second_user.uid) + + assert len(buckets) == 0 + + @pytest.mark.asyncio + async def test_get_bucket_with_invalid_to_time_permission( + self, db: AsyncSession, random_bucket: Bucket, random_second_user: User + ) -> None: + """ + Test for getting a foreign bucket with invalid 'to' time permission from CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. 
+ """ + await add_permission_for_bucket( + db, random_bucket.name, random_second_user.uid, to=datetime.now() - timedelta(days=10) + ) + + buckets = await CRUDBucket.get_for_user(db, random_second_user.uid) + + assert len(buckets) == 0 + + +class TestBucketCRUDCreate: + @pytest.mark.asyncio + async def test_create_bucket(self, db: AsyncSession, random_user: User) -> None: + """ + Test for creating a bucket with the CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_user : app.models.user.User + Random user for testing. pytest fixture. + """ + bucket_info = BucketIn(name=random_lower_string(), description=random_lower_string(127)) + bucket = await CRUDBucket.create(db, bucket_info, random_user.uid) + assert bucket + assert bucket.name == bucket_info.name + assert bucket.owner_id == random_user.uid + assert bucket.description == bucket_info.description + + stmt = select(Bucket).where(Bucket.name == bucket_info.name) + bucket_db = (await db.execute(stmt)).scalar() + + assert bucket_db + assert bucket_db.name == bucket_info.name + assert bucket_db.owner_id == random_user.uid + assert bucket_db.description == bucket_info.description + + await db.delete(bucket) + + @pytest.mark.asyncio + async def test_create_duplicated_bucket(self, db: AsyncSession, random_bucket: Bucket) -> None: + """ + Test for creating a duplicated bucket with the CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. 
+ """ + bucket_info = BucketIn(name=random_bucket.name, description=random_lower_string(127)) + bucket = await CRUDBucket.create(db, bucket_info, random_bucket.owner_id) + + assert bucket is None + + +class TestBucketCRUDDelete: + @pytest.mark.asyncio + async def test_delete_bucket(self, db: AsyncSession, random_bucket: Bucket) -> None: + """ + Test for deleting a bucket with the CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + """ + await CRUDBucket.delete(db, random_bucket) + + stmt = select(Bucket).where(Bucket.name == random_bucket.name) + bucket_db = (await db.execute(stmt)).scalar() + + assert bucket_db is None diff --git a/app/tests/crud/test_bucket_permission.py b/app/tests/crud/test_bucket_permission.py new file mode 100644 index 0000000000000000000000000000000000000000..d58307ebaa5da519438412c578dc1eb2661be29c --- /dev/null +++ b/app/tests/crud/test_bucket_permission.py @@ -0,0 +1,224 @@ +from datetime import datetime, timedelta + +import pytest +from sqlalchemy import and_ +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.crud.crud_bucket_permission import CRUDBucketPermission, DuplicateError +from app.models.bucket import Bucket +from app.models.bucket_permission import BucketPermission as BucketPermissionDB +from app.models.bucket_permission import PermissionEnum +from app.models.user import User +from app.schemas.bucket_permission import BucketPermission as BucketPermissionSchema +from app.schemas.bucket_permission import BucketPermissionParameters as BucketPermissionParametersSchema + + +class TestBucketPermissionCRUDGet: + @pytest.mark.asyncio + async def test_get_specific_bucket_permission( + self, db: AsyncSession, random_bucket_permission: BucketPermissionDB + ) -> None: + """ + Test for getting a specific 
bucket permission from the CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket_permission : app.models.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + """ + bucket_permission = await CRUDBucketPermission.get( + db, bucket_name=random_bucket_permission.bucket_name, user_id=random_bucket_permission.user_id + ) + assert bucket_permission + assert bucket_permission.user_id == random_bucket_permission.user_id + assert bucket_permission.bucket_name == random_bucket_permission.bucket_name + assert bucket_permission.permissions == random_bucket_permission.permissions + + @pytest.mark.asyncio + async def test_get_bucket_permissions_by_bucket_name( + self, db: AsyncSession, random_bucket_permission: BucketPermissionDB + ) -> None: + """ + Test for getting all bucket permissions for a specific bucket from the CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket_permission : app.models.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + """ + bucket_permissions = await CRUDBucketPermission.get_permissions_for_bucket( + db, bucket_name=random_bucket_permission.bucket_name + ) + assert len(bucket_permissions) == 1 + bucket_permission = bucket_permissions[0] + assert bucket_permission.user_id == random_bucket_permission.user_id + assert bucket_permission.bucket_name == random_bucket_permission.bucket_name + assert bucket_permission.permissions == random_bucket_permission.permissions + + @pytest.mark.asyncio + async def test_get_bucket_permissions_by_uid( + self, db: AsyncSession, random_bucket_permission: BucketPermissionDB + ) -> None: + """ + Test for getting all bucket permissions for a specific user from the CRUD Repository. 
+ + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket_permission : app.models.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + """ + bucket_permissions = await CRUDBucketPermission.get_permissions_for_user( + db, user_id=random_bucket_permission.user_id + ) + assert len(bucket_permissions) == 1 + bucket_permission = bucket_permissions[0] + assert bucket_permission.user_id == random_bucket_permission.user_id + assert bucket_permission.bucket_name == random_bucket_permission.bucket_name + assert bucket_permission.permissions == random_bucket_permission.permissions + + +class TestBucketPermissionCRUDCreate: + @pytest.mark.asyncio + async def test_create_bucket_permissions_for_unknown_user(self, db: AsyncSession, random_bucket: Bucket) -> None: + """ + Test for creating a bucket permission for an unknown user. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. + """ + permission = BucketPermissionSchema(bucket_name=random_bucket.name, uid="ImpossibleUser") + with pytest.raises(KeyError): + await CRUDBucketPermission.create(db, permission) + + @pytest.mark.asyncio + async def test_create_bucket_permissions_for_owner( + self, db: AsyncSession, random_user: User, random_bucket: Bucket + ) -> None: + """ + Test for creating a bucket permission for the owner of the bucket. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_user : app.models.user.User + Random user for testing who is owner of the bucket. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. 
+ """ + permission = BucketPermissionSchema(bucket_name=random_bucket.name, uid=random_user.uid) + with pytest.raises(ValueError): + await CRUDBucketPermission.create(db, permission) + + @pytest.mark.asyncio + async def test_create_duplicate_bucket_permissions( + self, db: AsyncSession, random_bucket_permission_schema: BucketPermissionSchema + ) -> None: + """ + Test for creating a duplicated bucket permission. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket_permission_schema : app.schemas.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + """ + permission = BucketPermissionSchema( + bucket_name=random_bucket_permission_schema.bucket_name, uid=random_bucket_permission_schema.uid + ) + with pytest.raises(DuplicateError): + await CRUDBucketPermission.create(db, permission) + + @pytest.mark.asyncio + async def test_create_valid_bucket_permissions( + self, db: AsyncSession, random_second_user: User, random_bucket: Bucket + ) -> None: + """ + Test for creating a valid bucket permission. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_second_user : app.models.user.User + Random second user for testing. pytest fixture. + random_bucket : app.models.bucket.Bucket + Random bucket for testing. pytest fixture. 
+ """ + permission = BucketPermissionSchema(bucket_name=random_bucket.name, uid=random_second_user.uid) + created_permission = await CRUDBucketPermission.create(db, permission) + + assert created_permission.user_id == random_second_user.uid + assert created_permission.bucket_name == random_bucket.name + + +class TestBucketPermissionCRUDDelete: + @pytest.mark.asyncio + async def test_delete_bucket_permissions( + self, db: AsyncSession, random_bucket_permission: BucketPermissionDB + ) -> None: + """ + Test for creating a valid bucket permission. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket_permission : app.models.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. + """ + await CRUDBucketPermission.delete(db, random_bucket_permission) + + stmt = select(BucketPermissionDB).where( + and_( + BucketPermissionDB.bucket_name == random_bucket_permission.bucket_name, + BucketPermissionDB.user_id == random_bucket_permission.user_id, + ) + ) + bucket_permission_db = (await db.execute(stmt)).scalar() + + assert bucket_permission_db is None + + +class TestBucketPermissionCRUDUpdate: + @pytest.mark.asyncio + async def test_update_bucket_permissions( + self, db: AsyncSession, random_bucket_permission: BucketPermissionDB + ) -> None: + """ + Test for updating a valid bucket permission in the database. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_bucket_permission : app.models.bucket_permission.BucketPermission + Bucket permission for a random bucket for testing. pytest fixture. 
+ """ + new_from_time = datetime(2022, 1, 1, 0, 0) + new_params = BucketPermissionParametersSchema( + from_timestamp=new_from_time, + to_timestamp=new_from_time + timedelta(days=1), + permission=PermissionEnum.READWRITE, + file_prefix="pseudo/folder/", + ) + new_permission = await CRUDBucketPermission.update_permission(db, random_bucket_permission, new_params) + + assert new_permission.user_id == random_bucket_permission.user_id + assert new_permission.bucket_name == random_bucket_permission.bucket_name + assert new_permission.from_ == new_params.from_timestamp + assert new_permission.to == new_params.to_timestamp + assert new_permission.permissions == new_params.permission + assert new_permission.file_prefix == new_params.file_prefix diff --git a/app/tests/crud/test_user.py b/app/tests/crud/test_user.py new file mode 100644 index 0000000000000000000000000000000000000000..46d83dda670099faad10eaa5e6bc4afb221c87ca --- /dev/null +++ b/app/tests/crud/test_user.py @@ -0,0 +1,59 @@ +import pytest +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.crud.crud_user import CRUDUser +from app.models.user import User +from app.tests.utils.utils import random_lower_string + + +class TestUserCRUD: + @pytest.mark.asyncio + async def test_create_user(self, db: AsyncSession) -> None: + """ + Test for creating a user in the User CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. 
+ """ + user = User(uid=random_lower_string(), display_name=random_lower_string()) + await CRUDUser.create(db, user) + + db_user = (await db.execute(select(User).where(User.uid == user.uid))).scalar() + assert db_user + assert db_user.uid == user.uid + + await db.delete(db_user) + await db.commit() + + @pytest.mark.asyncio + async def test_get_user_by_id(self, db: AsyncSession, random_user: User) -> None: + """ + Test for getting a user by id from the User CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + random_user : app.models.user.User + Random user for testing. pytest fixture. + """ + user = await CRUDUser.get(db, random_user.uid) + assert user + assert random_user.uid == user.uid + assert random_user.display_name == user.display_name + + @pytest.mark.asyncio + async def test_get_unknown_user_by_id(self, db: AsyncSession) -> None: + """ + Test for getting an unknown user by id from the User CRUD Repository. + + Parameters + ---------- + db : sqlalchemy.ext.asyncio.AsyncSession. + Async database session to perform query on. pytest fixture. + """ + user = await CRUDUser.get(db, random_lower_string(length=16)) + assert user is None diff --git a/app/tests/mocks/__init__.py b/app/tests/mocks/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/tests/mocks/mock_rgw_admin.py b/app/tests/mocks/mock_rgw_admin.py new file mode 100644 index 0000000000000000000000000000000000000000..fc2ba9460b253f223fc02e7a7fe9b358c8611a15 --- /dev/null +++ b/app/tests/mocks/mock_rgw_admin.py @@ -0,0 +1,116 @@ +from fastapi import status +from rgwadmin.exceptions import RGWAdminException + +from app.tests.utils.utils import random_lower_string + + +class MockRGWAdmin: + """ + A functional mock class of the rgwadmin.RGWAdmin for testing purposes. 
+ + Functions + --------- + get_user(uid: str, stats: bool = False) -> dict[str, list[dict[str, str]]] + Returns a dict with only one key 'keys'. + create_key(uid: str, key_type: str = "s3", generate_key: bool = True) -> dict[str, list[dict[str, str]]] + Creates a new key for a user. + remove_key(access_key: str, uid: str) -> None + Remove a key for a user. + delete_user(uid: str) -> None + Deletes all keys for a user. + """ + + _keys: dict[str, list[dict[str, str]]] + + def __init__(self) -> None: + self._keys = {} + + def create_user(self, uid: str, max_buckets: int, display_name: str) -> None: + self.create_key(uid) + + def get_user(self, uid: str, stats: bool = False) -> dict[str, list[dict[str, str]]]: # noqa + """ + Get the keys from a user. + + Parameters + ---------- + uid : str + Username of a user. + stats : bool + Include stats in response. Will be ignored. + + Returns + ------- + user_keys : dict[str, list[dict[str, str]]] + The user object with the associated keys. See Notes. + + Notes + ----- + The dict this function returns has this form: + + { "keys" : [S3Key] } + + where each S3 Key dict has this form<br> + + { + "user" : str, + "access_key" : str, + "secret_key" : str + } + """ + if uid in self._keys: + return {"keys": self._keys[uid]} + return {"keys": []} + + def create_key(self, uid: str, key_type: str = "s3", generate_key: bool = True) -> list[dict[str, str]]: # noqa + """ + Create a S3 key for a user. + + Parameters + ---------- + uid : str + Username of a user. + key_type : str, default s3 + Type of the created key. Will be ignored. + generate_key : bool + Flag fore creating a random key. Will be ignored. + + Returns + ------- + keys : list[dict[str, str]] + All keys for the user including the new one. 
+ """ + new_key = {"user": uid, "access_key": random_lower_string(20).upper(), "secret_key": random_lower_string(40)} + if uid in self._keys: + self._keys[uid].append(new_key) + else: + self._keys[uid] = [new_key] + return self._keys[uid] + + def remove_key(self, access_key: str, uid: str) -> None: + """ + Remove a specific S3 key for a user. Raises an exception if key is unknown for the user. + + Parameters + ---------- + access_key : str + Access key to delete. + uid : str + Username of a user + """ + if access_key not in map(lambda key: key["access_key"], self._keys[uid]): + raise RGWAdminException(code=status.HTTP_404_NOT_FOUND) + index = [key["access_key"] for key in self._keys[uid]].index(access_key) + self._keys[uid].pop(index) + + def delete_user(self, uid: str) -> None: + """ + Remove all S3 keys for a user. + Convenience function for testing. + + Parameters + ---------- + uid : str + Username of a user. + """ + self._keys[uid] = [] diff --git a/app/tests/mocks/mock_s3_resource.py b/app/tests/mocks/mock_s3_resource.py new file mode 100644 index 0000000000000000000000000000000000000000..a1de1b9c63ed15068362dcd856241369d376340e --- /dev/null +++ b/app/tests/mocks/mock_s3_resource.py @@ -0,0 +1,398 @@ +from datetime import datetime + +from botocore.exceptions import ClientError + + +class MockS3ObjectSummary: + """ + Mock S3 object for the boto3 S3ObjectSummary for testing purposes. + + Attributes + ---------- + key : str + Key of the S3 object. + bucket_name : str + Name of the corresponding bucket. + size : int + Size of object in bytes. Always 100. + last_modified : datetime + Time and date of last modification of this object. + """ + + def __init__(self, bucket_name: str, key: str) -> None: + """ + Initialize a MockS3ObjectSummary. + + Parameters + ---------- + bucket_name : str + Name of the corresponding bucket. + key : str + Key of the S3 object. 
+ """ + self.key = key + self.bucket_name = bucket_name + self.size = 100 + self.last_modified = datetime.now() + + def __repr__(self) -> str: + return f"MockS3ObjectSummary(key={self.key}, bucket={self.bucket_name})" + + +class MockS3BucketPolicy: + """ + Mock S3 bucket policy the boto3 BucketPolicy for testing purposes. + + Functions + --------- + put(Policy: str) -> None + Save a new bucket policy. + + Attributes + ---------- + bucket_name : str + Name of the corresponding bucket. + policy : str + The policy in as a string. + """ + + def __init__(self, bucket_name: str): + self.bucket_name = bucket_name + self.policy: str = "" + + def put(self, Policy: str) -> None: + """ + Save a new bucket policy. + + Parameters + ---------- + Policy : str + The new policy as str. + """ + self.policy = Policy + + +class MockS3Bucket: + """ + Mock S3 bucket for the boto3 Bucket for testing purposes. + + Functions + --------- + Policy() -> app.tests.mocks.mock_s3_resource.MockS3BucketPolicy + Get the bucket policy from the bucket. + create() -> None + Create the bucket in the mock service. + delete() -> None + Delete the bucket in the mock service + delete_objects(Delete: dict[str, list[dict[str, str]]]) -> None + Delete multiple objects in the bucket. + get_objects() -> list[app.tests.mocks.mock_s3_resource.MockS3ObjectSummary] + List of MockS3ObjectSummary in the bucket. + add_object(obj: app.tests.mocks.mock_s3_resource.MockS3ObjectSummary) -> None + Add a MockS3ObjectSummary to the bucket. + + Attributes + ---------- + name: str + name of the bucket. + creation_date : datetime + Creation date of the bucket. + objects : app.tests.mocks.mock_s3_resource.MockS3Bucket.MockS3ObjectList + Object in the bucket in a proxy class. + """ + + class MockS3ObjectList: + """ + Proxy object to package a list in a class. 
+ + Important because you have to access all the S3ObjectsSummary of a bucket with + S3Bucket.objects.all() + + Functions + --------- + all() -> list[app.tests.mocks.mock_s3_resource.MockS3ObjectSummary] + Get the saved list. + filter(Prefix: str) -> app.tests.mocks.mock_s3_resource.MockS3Bucket.MockS3ObjectList + Filter the object in the list by the prefix all their keys should have. + add(obj: app.tests.mocks.mock_s3_resource.MockS3ObjectSummary) -> None + Add a MockS3ObjectSummary to the list. + delete(key: str) -> None + Delete a MockS3ObjectSummary from the list + """ + + def __init__(self, obj_list: list[MockS3ObjectSummary] | None = None) -> None: + self._objs: list[MockS3ObjectSummary] = [] if obj_list is None else obj_list + + def all(self) -> list[MockS3ObjectSummary]: + """ + Get the saved list. + + Returns + ------- + objects : list[app.tests.mocks.mock_s3_resource.MockS3ObjectSummary] + List of MockS3ObjectSummary + """ + return self._objs + + def add(self, obj: MockS3ObjectSummary) -> None: + """ + Add a MockS3ObjectSummary to the list. + + Parameters + ---------- + obj : app.tests.mocks.mock_s3_resource.MockS3ObjectSummary + MockS3ObjectSummary which should be added to the list + """ + self._objs.append(obj) + + def delete(self, key: str) -> None: + """ + Delete a MockS3ObjectSummary from the list. + + Parameters + ---------- + key : str + Key of the object to delete + """ + self._objs = [obj for obj in self._objs if obj.key != key] + + def filter(self, Prefix: str) -> "MockS3Bucket.MockS3ObjectList": + """ + Filter the object in the list by the prefix all their keys should have. + + Parameters + ---------- + Prefix : str + The prefix that all keys should have. + + Returns + ------- + obj_list : app.tests.mocks.mock_s3_resource.MockS3Bucket.MockS3ObjectList + The filtered list. 
+ """ + return MockS3Bucket.MockS3ObjectList(obj_list=list(filter(lambda x: x.key.startswith(Prefix), self._objs))) + + def __init__(self, name: str, parent_service: "MockS3ServiceResource"): + """ + Initialize a MockS3Bucket. + + Parameters + ---------- + name : str + Name of the bucket. + parent_service : app.tests.mocks.mock_s3_resource.MockS3ServiceResource + Mock service object where the bucket will be saved. + """ + self.name = name + self.creation_date: datetime = datetime.now() + self.objects = MockS3Bucket.MockS3ObjectList() + self._parent_service: MockS3ServiceResource = parent_service + self.policy = MockS3BucketPolicy(name) + + def Policy(self) -> MockS3BucketPolicy: + """ + Get the bucket policy from the bucket. + + Returns + ------- + bucket_policy : app.tests.mocks.mock_s3_resource.MockS3BucketPolicy + The corresponding bucket policy. + """ + return self.policy + + def create(self) -> None: + """ + Create the bucket in the mock S3 service. + """ + self._parent_service.create_bucket(self) + + def delete(self) -> None: + """ + Delete the bucket in the mock S3 service. + """ + self._parent_service.delete_bucket(self.name) + + def delete_objects(self, Delete: dict[str, list[dict[str, str]]]) -> None: + """ + Delete multiple objects in the bucket. + + Parameters + ---------- + Delete : dict[str, list[dict[str, str]]] + The keys of the objects to delete. + + Notes + ----- + The `Delete` parameter has the form + { + 'Objects': [ + { 'Key' : 'test.txt' },... + ] + } + """ + for key_object in Delete["Objects"]: + self.objects.delete(key=key_object["Key"]) + + def get_objects(self) -> list[MockS3ObjectSummary]: + """ + Get the MockS3ObjectSummary in the bucket. + Convenience function for testing. + + Returns + ------- + objects : list[app.tests.mocks.mock_s3_resource.MockS3ObjectSummary] + List of MockS3ObjectSummary in the bucket. 
+ """ + return self.objects.all() + + def add_object(self, obj: MockS3ObjectSummary) -> None: + """ + Add a MockS3ObjectSummary to the bucket. + Convenience function for testing. + + Parameters + ---------- + obj : app.tests.mocks.mock_s3_resource.MockS3ObjectSummary + Object to add. + """ + self.objects.add(obj) + + def __repr__(self) -> str: + return f"MockS3Bucket(name={self.name}, objects={[obj.key for obj in self.get_objects()]})" + + +class MockS3ServiceResource: + """ + A functional mock class of the boto3 S3ServiceResource for testing purposes. + + Functions + --------- + Bucket(name: str) -> app.tests.mocks.mock_s3_resource.MockS3bucket + Get/Create a mock bucket. + ObjectSummary(bucket_name: str, key: str) -> app.tests.mocks.mock_s3_resource.MockS3objectSummary + Create a mock object summary. + BucketPolicy(bucket_name: str) -> app.tests.mocks.mock_s3_resource.MockS3BucketPolicy + Get the bucket policy from the corresponding bucket. + BucketAcl(bucket_name: str) -> app.tests.mocks.mock_s3_resource.MockS3BucketAcl + Get the bucket acl from the corresponding bucket. + create_bucket(bucket: app.tests.mocks.mock_s3_resource.MockS3bucket) -> None + Create a bucket in the mock service. + delete_bucket(name: str) -> None + Delete a bucket in the mock service. + create_object_in_bucket(bucket_name: str, key: str) -> app.tests.mocks.mock_s3_resource.MockS3objectSummary + Create an MockS3ObjectSummary in a bucket. + """ + + def __init__(self) -> None: + self._buckets: dict[str, MockS3Bucket] = {} + + def Bucket(self, name: str) -> MockS3Bucket: + """ + Get an existing bucket from the mock service or creat a new mock bucket but doesn't save it yet. + Call `bucket.create()` on returned object for that. + + Parameters + ---------- + name : str + Name of the bucket. + + Returns + ------- + bucket : app.tests.mocks.mock_s3_resource.MockS3bucket + Existing/Created mock bucket. 
+ """ + return self._buckets[name] if name in self._buckets else MockS3Bucket(name=name, parent_service=self) + + def ObjectSummary(self, bucket_name: str, key: str) -> MockS3ObjectSummary: + """ + Create a mock object summary. + + Parameters + ---------- + bucket_name : str + Name of the corresponding bucket. + key : str + Key of the S3 object. + + Returns + ------- + obj : app.tests.mocks.mock_s3_resource.MockS3objectSummary + Existing MockS3ObjectSummary. + + Notes + ----- + Raises an `botocore.exceptions.ClientError` if the object doesn't exist in the bucket. + For creating a MockS3ObjectSummary see `create_object_in_bucket`. + """ + if key not in map(lambda obj: obj.key, self._buckets[bucket_name].get_objects()): + raise ClientError(operation_name="Mock", error_response={}) + return MockS3ObjectSummary(bucket_name=bucket_name, key=key) + + def BucketPolicy(self, bucket_name: str) -> MockS3BucketPolicy: + """ + Get the bucket policy from the corresponding bucket. + + Parameters + ---------- + bucket_name : str + Name of the bucket. + + Returns + ------- + bucket_policy : app.tests.mocks.mock_s3_resource.MockS3BucketPolicy + The corresponding bucket policy. + """ + return self._buckets[bucket_name].policy + + def create_bucket(self, bucket: MockS3Bucket) -> None: + """ + Create a bucket in the mock service. + Convenience function for testing. + + Parameters + ---------- + bucket : app.tests.mocks.mock_s3_resource.MockS3bucket + Bucket which should be created. + """ + self._buckets[bucket.name] = bucket + + def delete_bucket(self, name: str, force_delete: bool = False) -> None: + """ + Delete am empty bucket in the mock service. + Convenience function for testing. + + Parameters + ---------- + name : str + Name of the bucket. + force_delete : bool, default False + Force deletes even a non-empty bucket. 
+ """ + if name in self._buckets: + if not force_delete and len(self._buckets[name].get_objects()) > 0: + raise ClientError(operation_name="Mock", error_response={}) + del self._buckets[name] + + def create_object_in_bucket(self, bucket_name: str, key: str) -> MockS3ObjectSummary: + """ + Create an MockS3ObjectSummary in a bucket. + Convenience function for testing. + + Parameters + ---------- + bucket_name : str + Name of the bucket. + key : str + key of the S3 object. + + Returns + ------- + obj : app.tests.mocks.mock_s3_resource.MockS3ObjectSummary + Newly created MockS3ObjectSummary. + """ + obj = MockS3ObjectSummary(bucket_name=bucket_name, key=key) + self._buckets[bucket_name].add_object(obj) + return obj + + def __repr__(self) -> str: + return f"MockS3ServiceResource(buckets={[bucket_name for bucket_name in self._buckets.keys()]})" diff --git a/app/tests/unit/__init__.py b/app/tests/unit/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/app/tests/unit/test_bucket_name.py b/app/tests/unit/test_bucket_name.py new file mode 100644 index 0000000000000000000000000000000000000000..079a57c577a61d48f04b1796bd620d7513bce191 --- /dev/null +++ b/app/tests/unit/test_bucket_name.py @@ -0,0 +1,46 @@ +import pytest +from pydantic import ValidationError + +from app.schemas.bucket import BucketIn +from app.tests.utils.utils import random_ipv4_string, random_lower_string + + +class TestBucketName: + def test_valid_name(self) -> None: + """ + Test bucket schema with a valid name. + """ + bucket = BucketIn(name=random_lower_string(), description=random_lower_string(130)) + assert bucket + + def test_ip_name(self) -> None: + """ + Test bucket schema with an invalid name.\n + 100 random IPv4 addresses will be evaluated. 
+ """ + desc = random_lower_string(130) + for _ in range(100): + with pytest.raises(ValidationError): + BucketIn(name=random_ipv4_string(), description=desc) + + def test_too_short_name(self) -> None: + """ + Test bucket schema with a too short name. + """ + with pytest.raises(ValidationError): + BucketIn(name=random_lower_string(2), description=random_lower_string(130)) + + def test_too_long_name(self) -> None: + """ + Test bucket schema with a too long name. + """ + with pytest.raises(ValidationError): + BucketIn(name=random_lower_string(64), description=random_lower_string(130)) + + def test_name_with_tags(self) -> None: + """ + Test bucket schema with a name with tags. + """ + name = f"{random_lower_string(10)}.{random_lower_string(10)}.{random_lower_string(10)}.{random_lower_string(9)}" + bucket = BucketIn(name=name, description=random_lower_string(130)) + assert bucket diff --git a/app/tests/unit/test_bucket_permission_scheme.py b/app/tests/unit/test_bucket_permission_scheme.py new file mode 100644 index 0000000000000000000000000000000000000000..b0d237752806e07b0cd3afb9754acd0c06bc8c77 --- /dev/null +++ b/app/tests/unit/test_bucket_permission_scheme.py @@ -0,0 +1,182 @@ +from datetime import datetime + +import pytest + +from app.models.bucket_permission import PermissionEnum +from app.schemas.bucket_permission import BucketPermission +from app.tests.utils.utils import random_lower_string + + +class _TestPermissionPolicy: + @pytest.fixture(scope="function") + def random_base_permission(self) -> BucketPermission: + """ + Generate a base READ bucket permission schema. + """ + return BucketPermission( + uid=random_lower_string(), bucket_name=random_lower_string(), permission=PermissionEnum.READ + ) + + +class TestPermissionPolicyPermissionType(_TestPermissionPolicy): + def test_READ_permission(self, random_base_permission: BucketPermission) -> None: + """ + Test for converting a READ Permission into a bucket policy statement. 
+ + Parameters + ---------- + random_base_permission : app.schemas.bucket_permission.BucketPermission + Random base bucket permission for testing. pytest fixture. + """ + uid = random_lower_string() + stmts = random_base_permission.map_to_bucket_policy_statement(user_id=uid) + assert len(stmts) == 2 + + object_stmt = stmts[0] + assert object_stmt["Effect"] == "Allow" + assert object_stmt["Sid"] == random_base_permission.to_hash(uid) + assert object_stmt["Principal"]["AWS"] == f"arn:aws:iam:::user/{random_base_permission.uid}" + assert object_stmt["Resource"] == f"arn:aws:s3:::{random_base_permission.bucket_name}/*" + with pytest.raises(KeyError): + assert object_stmt["Condition"] + assert len(object_stmt["Action"]) == 1 + assert object_stmt["Action"][0] == "s3:GetObject" + + bucket_stmt = stmts[1] + assert bucket_stmt["Sid"] == random_base_permission.to_hash(uid) + assert bucket_stmt["Effect"] == "Allow" + assert bucket_stmt["Principal"]["AWS"] == f"arn:aws:iam:::user/{random_base_permission.uid}" + assert bucket_stmt["Resource"] == f"arn:aws:s3:::{random_base_permission.bucket_name}" + with pytest.raises(KeyError): + assert bucket_stmt["Condition"] + assert len(bucket_stmt["Action"]) == 1 + assert bucket_stmt["Action"][0] == "s3:ListBucket" + + def test_WRITE_permission(self, random_base_permission: BucketPermission) -> None: + """ + Test for converting a WRITE Permission into a bucket policy statement. + + Parameters + ---------- + random_base_permission : app.schemas.bucket_permission.BucketPermission + Random base bucket permission for testing. pytest fixture. 
+ """ + random_base_permission.permission = PermissionEnum.WRITE + stmts = random_base_permission.map_to_bucket_policy_statement(user_id=random_lower_string()) + assert len(stmts) == 1 + + object_stmt = stmts[0] + with pytest.raises(KeyError): + assert object_stmt["Condition"] + assert len(object_stmt["Action"]) == 2 + assert "s3:PutObject" in object_stmt["Action"] + assert "s3:DeleteObject" in object_stmt["Action"] + + def test_READWRITE_permission(self, random_base_permission: BucketPermission) -> None: + """ + Test for converting a READWRITE Permission into a bucket policy statement. + + Parameters + ---------- + random_base_permission : app.schemas.bucket_permission.BucketPermission + Random base bucket permission for testing. pytest fixture. + """ + random_base_permission.permission = PermissionEnum.READWRITE + stmts = random_base_permission.map_to_bucket_policy_statement(user_id=random_lower_string()) + assert len(stmts) == 2 + + object_stmt = stmts[0] + with pytest.raises(KeyError): + assert object_stmt["Condition"] + assert len(object_stmt["Action"]) == 3 + assert "s3:PutObject" in object_stmt["Action"] + assert "s3:DeleteObject" in object_stmt["Action"] + assert "s3:GetObject" in object_stmt["Action"] + + bucket_stmt = stmts[1] + with pytest.raises(KeyError): + assert bucket_stmt["Condition"] + assert len(bucket_stmt["Action"]) == 1 + assert bucket_stmt["Action"][0] == "s3:ListBucket" + + +class TestPermissionPolicyCondition(_TestPermissionPolicy): + def test_to_timestamp_condition(self, random_base_permission: BucketPermission) -> None: + """ + Test for converting a READ Permission with end time condition into a bucket policy statement. + + Parameters + ---------- + random_base_permission : app.schemas.bucket_permission.BucketPermission + Random base bucket permission for testing. pytest fixture. 
+ """ + time = datetime.now() + random_base_permission.to_timestamp = time + + stmts = random_base_permission.map_to_bucket_policy_statement(user_id=random_lower_string()) + assert len(stmts) == 2 + + object_stmt = stmts[0] + assert object_stmt["Condition"] + assert object_stmt["Condition"]["DateLessThan"]["aws:CurrentTime"] == time.strftime("%Y-%m-%dT%H:%M:%SZ") + with pytest.raises(KeyError): + assert object_stmt["Condition"]["DateGreaterThan"] + + bucket_stmt = stmts[1] + assert bucket_stmt["Condition"] + assert bucket_stmt["Condition"]["DateLessThan"]["aws:CurrentTime"] == time.strftime("%Y-%m-%dT%H:%M:%SZ") + with pytest.raises(KeyError): + assert bucket_stmt["Condition"]["DateGreaterThan"] + + def test_from_timestamp_condition(self, random_base_permission: BucketPermission) -> None: + """ + Test for converting a READ Permission with start time condition into a bucket policy statement. + + Parameters + ---------- + random_base_permission : app.schemas.bucket_permission.BucketPermission + Random base bucket permission for testing. pytest fixture. + """ + time = datetime.now() + random_base_permission.from_timestamp = time + + stmts = random_base_permission.map_to_bucket_policy_statement(user_id=random_lower_string()) + assert len(stmts) == 2 + + object_stmt = stmts[0] + assert object_stmt["Condition"] + assert object_stmt["Condition"]["DateGreaterThan"]["aws:CurrentTime"] == time.strftime("%Y-%m-%dT%H:%M:%SZ") + with pytest.raises(KeyError): + assert object_stmt["Condition"]["DateLessThan"] + + bucket_stmt = stmts[1] + assert bucket_stmt["Condition"] + assert bucket_stmt["Condition"]["DateGreaterThan"]["aws:CurrentTime"] == time.strftime("%Y-%m-%dT%H:%M:%SZ") + with pytest.raises(KeyError): + assert bucket_stmt["Condition"]["DateLessThan"] + + def test_file_prefix_condition(self, random_base_permission: BucketPermission) -> None: + """ + Test for converting a READ Permission with file prefix condition into a bucket policy statement. 
+
+    Parameters
+    ----------
+    random_base_permission : app.schemas.bucket_permission.BucketPermission
+        Random base bucket permission for testing. pytest fixture.
+    """
+    random_base_permission.file_prefix = random_lower_string(length=8) + "/" + random_lower_string(length=8) + "/"
+
+    stmts = random_base_permission.map_to_bucket_policy_statement(user_id=random_lower_string())
+    assert len(stmts) == 2
+
+    object_stmt = stmts[0]
+    assert (
+        object_stmt["Resource"]
+        == f"arn:aws:s3:::{random_base_permission.bucket_name}/{random_base_permission.file_prefix}*"
+    )
+    with pytest.raises(KeyError):
+        assert object_stmt["Condition"]
+
+    bucket_stmt = stmts[1]
+    assert bucket_stmt["Condition"]
+    assert bucket_stmt["Condition"]["StringLike"]["s3:prefix"] == random_base_permission.file_prefix + "*"
diff --git a/app/tests/utils/__init__.py b/app/tests/utils/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/app/tests/utils/bucket.py b/app/tests/utils/bucket.py
new file mode 100644
index 0000000000000000000000000000000000000000..f4982fe5f0f6d1cfb4679cb22a7f5d5d26f0a5cf
--- /dev/null
+++ b/app/tests/utils/bucket.py
@@ -0,0 +1,72 @@
+from datetime import datetime
+
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from app.models.bucket import Bucket
+from app.models.bucket_permission import BucketPermission, PermissionEnum
+from app.models.user import User
+
+from .utils import random_lower_string
+
+
+async def create_random_bucket(db: AsyncSession, user: User) -> Bucket:
+    """
+    Creates a random bucket in the database.
+
+    Parameters
+    ----------
+    db : sqlalchemy.ext.asyncio.AsyncSession
+        Async database session to perform query on.
+    user : app.models.user.User
+        Owner of the bucket.
+
+    Returns
+    -------
+    bucket : app.models.bucket.Bucket
+        Newly created bucket.
+    """
+    bucket = Bucket(
+        name=random_lower_string(),
+        description=random_lower_string(length=127),
+        owner_id=user.uid,
+    )
+    db.add(bucket)
+    await db.commit()
+    return bucket
+
+
+async def add_permission_for_bucket(
+    db: AsyncSession,
+    bucket_name: str,
+    uid: str,
+    from_: datetime | None = None,
+    to: datetime | None = None,
+    permission: PermissionEnum = PermissionEnum.READ,
+) -> None:
+    """
+    Creates a permission on a bucket for a user in the database.
+
+    Parameters
+    ----------
+    db : sqlalchemy.ext.asyncio.AsyncSession
+        Async database session to perform query on.
+    bucket_name : str
+        Name of the bucket.
+    uid : str
+        UID of the user who is granted the permission.
+    from_ : datetime.datetime | None, default None
+        Time from when the permission should be active.
+    to : datetime.datetime | None, default None
+        Time until when the permission should be active.
+    permission : app.models.bucket_permission.PermissionEnum, default PermissionEnum.READ
+        The permission the user is granted.
+    """
+    perm = BucketPermission(
+        user_id=uid,
+        bucket_name=bucket_name,
+        from_=from_,
+        to=to,
+        permissions=permission.name,
+    )
+    db.add(perm)
+    await db.commit()
diff --git a/app/tests/utils/user.py b/app/tests/utils/user.py
new file mode 100644
index 0000000000000000000000000000000000000000..ad70cf1298f8c6fae6fc95950f28492b7ff963e5
--- /dev/null
+++ b/app/tests/utils/user.py
@@ -0,0 +1,48 @@
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from app.core.security import create_access_token
+from app.models.user import User
+
+from .utils import random_lower_string
+
+
+def get_authorization_headers(uid: str) -> dict[str, str]:
+    """
+    Log in a user and return the correct headers for subsequent requests.
+
+    Parameters
+    ----------
+    uid : str
+        UID of the user who should be logged in.
+
+    Returns
+    -------
+    headers : dict[str,str]
+        HTTP Headers to authorize each request.
+    """
+    a_token = create_access_token(uid)
+    headers = {"Authorization": f"Bearer {a_token}"}
+    return headers
+
+
+async def create_random_user(db: AsyncSession) -> User:
+    """
+    Creates a random user in the database.
+
+    Parameters
+    ----------
+    db : sqlalchemy.ext.asyncio.AsyncSession
+        Async database session to perform query on.
+
+    Returns
+    -------
+    user : app.models.user.User
+        Newly created user.
+    """
+    user = User(
+        uid=random_lower_string(),
+        display_name=random_lower_string(),
+    )
+    db.add(user)
+    await db.commit()
+    return user
diff --git a/app/tests/utils/utils.py b/app/tests/utils/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..3699b14788955fcee9b41c76eac1d811e3491e47
--- /dev/null
+++ b/app/tests/utils/utils.py
@@ -0,0 +1,53 @@
+import random
+import string
+from datetime import datetime
+from typing import Any
+
+
+def random_lower_string(length: int = 32) -> str:
+    """
+    Creates a random string of the given length.
+
+    Parameters
+    ----------
+    length : int, default 32
+        Length for the random string.
+
+    Returns
+    -------
+    string : str
+        Random string.
+    """
+    return "".join(random.choices(string.ascii_lowercase, k=length))
+
+
+def random_ipv4_string() -> str:
+    """
+    Creates a random IPv4 address.
+
+    Returns
+    -------
+    string : str
+        Random IPv4 address.
+    """
+    return ".".join(str(random.randint(0, 255)) for _ in range(4))
+
+
+def json_datetime_converter(obj: Any) -> str | None:
+    """
+    Helper function for the json converter to convert the object into a string format if it is a datetime object.\n
+    Parse a datetime object into the format YYYY-MM-DDTHH:MM:SS, e.g. 2022-01-01T00:00:00
+
+    Parameters
+    ----------
+    obj : Any
+        Object to try to convert to a datetime object.
+ + Returns + ------- + time : str | None + The str representation of a datetime object, None otherwise + """ + if isinstance(obj, datetime): + return obj.strftime("%Y-%m-%dT%H:%M:%S") + return None diff --git a/ceph/README.md b/ceph/README.md index 241a8a8e5b5a8e765002b5f1ca39a07247ac84af..3c417a3f7416598df2ac46c1695f1e9d3e7414e0 100644 --- a/ceph/README.md +++ b/ceph/README.md @@ -1,4 +1,4 @@ -Following mostly the [cephadm install guide](https://docs.ceph.com/en/pacific/cephadm/install/) to +Following mostly the [cephadm install guide](https://docs.ceph.com/en/pacific/cephadm/install/) to setup a ceph cluster within Openstack. # Requirements @@ -47,7 +47,7 @@ sudo cephadm shell -- ceph orch host add ceph-6 192.168.192.111 - Add ephemeral disks as OSD `sudo cephadm shell -- ceph orch apply osd --all-available-devices` -# Add RGWs +# Add RGWs - Add two rados gateways `sudo cephadm shell -- ceph orch apply rgw s3` @@ -56,8 +56,8 @@ sudo cephadm shell -- ceph orch host add ceph-6 192.168.192.111 `sudo cephadm shell -- ceph orch host label add ceph-2 rgw` `sudo cephadm shell -- ceph orch apply rgw s3 '--placement=label:rgw count-per-host:2' --port=8000` -# Create a VPN tunnel -With [sshuttle](https://github.com/sshuttle/sshuttle) you can easily create a VPN connection from your +# Create a VPN tunnel +With [sshuttle](https://github.com/sshuttle/sshuttle) you can easily create a VPN connection from your workstation/notebook to our cloud based ceph cluster. **sshuttle** can be installed using `pip`. 
-`$ sshuttle -r jkrueger@129.70.51.109:30118 192.168.192.0/24` \ No newline at end of file +`$ sshuttle -r jkrueger@129.70.51.109:30118 192.168.192.0/24` diff --git a/ceph/playbook/files/public_keys/dgoebel b/ceph/playbook/files/public_keys/dgoebel index 7d12516144a291e58a26987a7cc29d043fb85833..45fc14759d7cbe8423dc08290b1dffdd9ac59d0f 100644 --- a/ceph/playbook/files/public_keys/dgoebel +++ b/ceph/playbook/files/public_keys/dgoebel @@ -1 +1 @@ -ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAID7Cu6YaS6GanmMiL8pzFOCb8QXecUxFea51iBy97+OO daniel@daniel-thinkpad \ No newline at end of file +ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAID7Cu6YaS6GanmMiL8pzFOCb8QXecUxFea51iBy97+OO daniel@daniel-thinkpad diff --git a/ceph/playbook/files/public_keys/jkrueger b/ceph/playbook/files/public_keys/jkrueger index ddd59427709ed0ced3332c06fed1e29f6a384261..71eb3172c1ba3cf00d128932e2ab6bafe41a90e8 100644 --- a/ceph/playbook/files/public_keys/jkrueger +++ b/ceph/playbook/files/public_keys/jkrueger @@ -1 +1 @@ -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDTnooRQoKGIbdtZvnG/qsANHeU4qzD/iV/VdLBsbSo0KlJUvjh0kIJGxe2Ums5qh/CV3QA4xjq5A0rDUfU84k4iR8zGRnxIrCjWImfm5/Dd1OvokqorJ02PmRjM1krZhVZaWjERIzSHRJTVd4ivw8pSm080lv4uo9T/0xzWeyvBQ0ZWi6KjClJYA7gl0DGOgypIh54JCAIaWgoYXAcCw4a5wu2W8dpjCJWn4M1Ci6eiAFQooa2xrgFRJI6/BLa3GgI38e7W9IbAiWN224RWWkjVp49J2tBwlVFNsrWoIcbIpcGsjlRnZWtRCquRncq5KKTt2V2BdKJFF36OAJaHGIP \ No newline at end of file +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDTnooRQoKGIbdtZvnG/qsANHeU4qzD/iV/VdLBsbSo0KlJUvjh0kIJGxe2Ums5qh/CV3QA4xjq5A0rDUfU84k4iR8zGRnxIrCjWImfm5/Dd1OvokqorJ02PmRjM1krZhVZaWjERIzSHRJTVd4ivw8pSm080lv4uo9T/0xzWeyvBQ0ZWi6KjClJYA7gl0DGOgypIh54JCAIaWgoYXAcCw4a5wu2W8dpjCJWn4M1Ci6eiAFQooa2xrgFRJI6/BLa3GgI38e7W9IbAiWN224RWWkjVp49J2tBwlVFNsrWoIcbIpcGsjlRnZWtRCquRncq5KKTt2V2BdKJFF36OAJaHGIP diff --git a/ceph/playbook/site.yml b/ceph/playbook/site.yml index 90a6374fa1bb9dfe37a92055cb32c7580b829bcd..6084591cd965239bad0918c406ed273782ca6226 100644 --- a/ceph/playbook/site.yml +++ b/ceph/playbook/site.yml @@ 
-48,4 +48,4 @@ mount: path: /mnt src: /dev/vdb - state: absent \ No newline at end of file + state: absent diff --git a/figures/cloud_object_storage.svg b/figures/cloud_object_storage.svg new file mode 100644 index 0000000000000000000000000000000000000000..7ffb9c165a0cf222fe021f88f8d626ce190c71d4 --- /dev/null +++ b/figures/cloud_object_storage.svg @@ -0,0 +1,577 @@ +<?xml version="1.0" encoding="UTF-8"?><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" fill-opacity="1" color-rendering="auto" color-interpolation="auto" text-rendering="auto" stroke="black" stroke-linecap="square" width="685" stroke-miterlimit="10" shape-rendering="auto" stroke-opacity="1" fill="black" stroke-dasharray="none" font-weight="normal" stroke-width="1" height="658" font-family="'Dialog'" font-style="normal" stroke-linejoin="miter" font-size="12px" stroke-dashoffset="0" image-rendering="auto"> + <!--Generated by ySVG 2.6--> + <defs id="genericDefs"/> + <g> + <g fill="white" text-rendering="geometricPrecision" shape-rendering="geometricPrecision" transform="translate(-351,-156)" stroke="white"> + <rect x="351" width="685" height="658" y="156" clip-path="url(#clipPath2)" stroke="none"/> + </g> + <g text-rendering="geometricPrecision" shape-rendering="geometricPrecision" transform="matrix(0.998,0,0,1.0279,114.2205,546.37)"> + <g clip-path="url(#clipPath3)"> + <svg xml:space="preserve" opacity="1" writing-mode="lr-tb" stop-color="rgb(0, 0, 0)" shape-rendering="auto" glyph-orientation-horizontal="0deg" color-profile="auto" lighting-color="rgb(255, 255, 255)" color="rgb(0, 0, 0)" font-weight="400" alignment-baseline="auto" font-style="normal" version="1.1" color-interpolation-filters="linearrgb" text-anchor="start" stroke-linecap="butt" color-interpolation="srgb" font-variant="normal" word-spacing="normal" fill-opacity="1" text-rendering="auto" clip-path="none" text-decoration="none" letter-spacing="normal" viewBox="0 0 57 65" glyph-orientation-vertical="auto" 
display="inline" font-size-adjust="none" overflow="hidden" fill="rgb(0, 0, 0)" font-stretch="normal" stroke-dasharray="none" id="svg1.Ebene_1" stroke-miterlimit="4" stop-opacity="1" color-rendering="auto" font-size="12" pointer-events="visiblepainted" mask="none" direction="ltr" baseline-shift="baseline" enable-background="new 0 0 57 65" fill-rule="nonzero" image-rendering="auto" stroke-dashoffset="0" width="57px" marker-end="none" clip="auto" cursor="auto" stroke="none" filter="none" visibility="visible" kerning="auto" stroke-width="1" font-family=""Arial","Helvetica",sans-serif" flood-opacity="1" clip-rule="nonzero" src="none" height="65px" unicode-bidi="normal" stroke-linejoin="miter" stroke-opacity="1" flood-color="rgb(0, 0, 0)" dominant-baseline="auto" marker-start="none" x="0px" marker-mid="none" y="0px"> +<g> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.2803 -2253.9199)" gradientUnits="userSpaceOnUse" r="11.4448" cx="27.8696" id="svg1.neck_x5F_white_1_" cy="-2299.3257" fx="25.6369" fy="-2300.5422"> + <stop offset="0" style="stop-color:#B38E5D"/> + <stop offset="1" style="stop-color:#805126"/> + </radialGradient> + <path fill="url(#svg1.neck_x5F_white_1_)" id="svg1.neck_x5F_white_2_" d="M19.278,37.799h18.188 v13.23c-1.313,0.371-17.173,0.436-18.188,0.172V37.799z" stroke-miterlimit="10" stroke="#5B453B"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.2803 -2253.9199)" gradientUnits="userSpaceOnUse" r="31.0537" cx="27.3228" id="svg1.SVGID_1_" cy="-2307.6812" fx="21.2648" fy="-2310.9819"> + <stop offset="0" style="stop-color:#B38E5D"/> + <stop offset="1" style="stop-color:#805126"/> + </radialGradient> + <path fill="url(#svg1.SVGID_1_)" d="M49.529,51.225c-4.396-4.396-10.951-5.884-12.063-6.109 V37.8H19.278c0,0,0.038,6.903,0,6.868c0,0-6.874,0.997-12.308,6.432C1.378,56.691,0.5,62.77,0.5,62.77 c0,1.938,1.575,3.492,3.523,3.492h48.51c1.947,0,3.521-1.558,3.521-3.492C56.055,62.768,54.211,55.906,49.529,51.225z" stroke-miterlimit="10" 
stroke="#5B453B"/> + + <linearGradient gradientTransform="matrix(1 0 0 1 0.2402 -2319.0742)" x1="59.7656" x2="-3.673" gradientUnits="userSpaceOnUse" y1="2374.7451" y2="2374.7451" id="svg1.body_1_"> + <stop offset="0" style="stop-color:#49AD33"/> + <stop offset="1" style="stop-color:#C2DA92"/> + </linearGradient> + <path fill="url(#svg1.body_1_)" d="M0.5,62.768c0,1.938,1.575,3.494,3.523,3.494h48.51 c1.947,0,3.521-1.559,3.521-3.494c0,0-1.844-6.861-6.525-11.543c-4.815-4.813-11.244-6.146-11.244-6.146 c-1.771,1.655-5.61,3.802-10.063,3.802c-4.453,0-8.292-2.146-10.063-3.802c0,0-5.755,0.586-11.189,6.021 C1.378,56.689,0.5,62.768,0.5,62.768z" id="svg1.body_12_" stroke="#008D33"/> + <path fill="#2068A3" d="M28.106,33.486c-8.112,0-12.688,4.313-12.688,10.438c0,7.422,12.688,10.438,12.688,10.438 s14.688-3.016,14.688-10.438C42.793,38.75,36.215,33.486,28.106,33.486z M26.288,53.051c0,0-7.135-2.093-8.805-7.201 c-0.222-0.682,0.147-1.156,0.795-1.521V37.8h20.188v6.663c0.235,0.352,1.109,0.737,1.229,1.387 C40.445,49.917,26.288,53.051,26.288,53.051z" stroke="#2068A3"/> + <path fill="#5491CF" d="M49.529,51.225c-2.239-2.24-5.041-3.724-7.396-4.67 c-2.854,5.51-14.022,7.807-14.022,7.807s-10.472-2.484-12.387-8.514c-2.439,0.771-5.787,2.287-8.749,5.25 c-5.592,5.592-6.47,11.67-6.47,11.67c0,1.938,1.575,3.492,3.523,3.492h48.51c1.947,0,3.521-1.558,3.521-3.492 C56.055,62.768,54.211,55.906,49.529,51.225z" stroke-miterlimit="10" stroke="#2068A3"/> + <path fill="#5491CF" d="M13.404,44.173c1.15-1.81,2.039-3.832,3.332-5.397c-0.514,1.027-1.669,4.084-1.669,5.148 c0,5.186,10.366,9.079,14.688,10.438c-3.472,1.627-9.134-1.498-11.335-2.36c-3.601-1.419-4.071-3.063-5.89-4.854 C12.523,47.135,12.878,45,13.404,44.173z" stroke="#2068A3"/> + <path fill="#5491CF" d="M45.777,43.924c-1.317-1.568-5.11-9.424-6.604-6.617c0.516,1.025,3.617,3.693,3.617,6.617 c0,5.186-10.27,8.576-16.698,9.145c1.429,4.938,11.372,1.293,13.804-0.313c3.563-2.354,4.563-5.133,7.854-3.705 C47.754,49.045,48.006,46.574,45.777,43.924z" stroke="#2068A3"/> + 
<path fill="none" d="M30.777,54.167c0.357,0.836-0.153,1.983-0.352,2.813 c-0.256,1.084-0.072,2.104,0.102,3.186c0.164,1.02,0.156,2.107,0.25,3.167c0.082,0.916,0.482,1.849,0.357,2.75" stroke="#2068A3" stroke-linecap="round"/> + <path fill="none" d="M23.695,53.417c-0.508,0.584-0.476,2.209-0.398,3 c0.116,1.183,0.456,2.099,0.333,3.333c-0.192,1.943,0.154,4.479-0.436,6.333" stroke="#2068A3" stroke-linecap="round"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.2803 -2253.9199)" gradientUnits="userSpaceOnUse" r="23.425" cx="27.623" id="svg1.face_x5F_white_1_" cy="-2279.646" fx="23.0534" fy="-2282.1357"> + <stop offset="0" style="stop-color:#B38E5D"/> + <stop offset="1" style="stop-color:#805126"/> + </radialGradient> + <path fill="url(#svg1.face_x5F_white_1_)" id="svg1.face_x5F_white_2_" d="M43.676,23.357 c0.086,10.2-6.738,18.52-15.25,18.586c-8.5,0.068-15.464-8.146-15.55-18.344C12.794,13.4,19.618,5.079,28.123,5.012 C36.627,4.945,43.59,13.158,43.676,23.357z" stroke-miterlimit="10" stroke="#5B453B"/> + + <linearGradient gradientTransform="matrix(0.275 0 0 0.2733 -1558.9874 -3088.7842)" x1="5761.7578" x2="5785.3872" gradientUnits="userSpaceOnUse" y1="11331.9785" y2="11425.4277" id="svg1.face_highlight_1_"> + <stop offset="0" style="stop-color:#FFFFFF;stop-opacity:0.42"/> + <stop offset="1" style="stop-color:#FFFFFF;stop-opacity:0.12"/> + </linearGradient> + <path fill="url(#svg1.face_highlight_1_)" id="svg1.face_highlight_2_" d="M27.958,6.333c-6.035,0.047-10.747,4.493-12.787,10.386 c-0.664,1.919-0.294,4.043,0.98,5.629c2.73,3.398,5.729,6.283,9.461,8.088c3.137,1.518,7.535,2.385,11.893,1.247 c2.274-0.592,3.988-2.459,4.375-4.766c0.187-1.094,0.293-2.289,0.283-3.553C42.083,13.952,36.271,6.268,27.958,6.333z"/> + <path fill="#4B4B4B" d="M20.278,13.25 c3.417,4.333,9.333,6.917,9.333,6.917l-1.417-3.5c0,0,7.094,4.691,8.083,4.333c0.968-0.2-1.082-3.807-1.082-3.807 
s3.138,1.795,4.854,3.969c1.803,2.28,4.285,3.504,4.285,3.504S47.027,2.719,27.289,2.744C8.278,2.709,12.058,27.678,12.058,27.678 L14.695,17c0,0,0.914,5.757,1.399,4.875C17.861,15.211,18.861,11.5,20.278,13.25z" id="svg1.Hair_Young_Black_1_" stroke="#000000" stroke-linecap="round" stroke-linejoin="round"/> +</g> +</svg> + </g> + </g> + <g text-rendering="geometricPrecision" stroke-miterlimit="1.45" shape-rendering="geometricPrecision" font-family="sans-serif" transform="matrix(1,0,0,1,-351,-156)" stroke-linecap="butt"> + <text x="473.8125" xml:space="preserve" y="794.0723" clip-path="url(#clipPath2)" stroke="none">User A</text> + </g> + <g text-rendering="geometricPrecision" shape-rendering="geometricPrecision" transform="matrix(0.9989,0,0,1.0275,225.2377,546.5038)"> + <g clip-path="url(#clipPath4)"> + <svg xml:space="preserve" opacity="1" writing-mode="lr-tb" stop-color="rgb(0, 0, 0)" shape-rendering="auto" glyph-orientation-horizontal="0deg" color-profile="auto" lighting-color="rgb(255, 255, 255)" color="rgb(0, 0, 0)" font-weight="400" alignment-baseline="auto" font-style="normal" version="1.1" color-interpolation-filters="linearrgb" text-anchor="start" stroke-linecap="butt" color-interpolation="srgb" font-variant="normal" word-spacing="normal" fill-opacity="1" text-rendering="auto" clip-path="none" text-decoration="none" letter-spacing="normal" viewBox="0 0 57 65" glyph-orientation-vertical="auto" display="inline" font-size-adjust="none" overflow="hidden" fill="rgb(0, 0, 0)" font-stretch="normal" stroke-dasharray="none" id="svg2.Ebene_1" stroke-miterlimit="4" stop-opacity="1" color-rendering="auto" font-size="12" pointer-events="visiblepainted" mask="none" direction="ltr" baseline-shift="baseline" enable-background="new 0 0 57 65" fill-rule="nonzero" image-rendering="auto" stroke-dashoffset="0" width="57px" marker-end="none" clip="auto" cursor="auto" stroke="none" filter="none" visibility="visible" kerning="auto" stroke-width="1" 
font-family=""Arial","Helvetica",sans-serif" flood-opacity="1" clip-rule="nonzero" src="none" height="65px" unicode-bidi="normal" stroke-linejoin="miter" stroke-opacity="1" flood-color="rgb(0, 0, 0)" dominant-baseline="auto" marker-start="none" x="0px" marker-mid="none" y="0px"> +<g> + <polygon fill="#FAE1AA" points="44.365,49.031 13.101,53.422 12.191,30.465 43.408,31.092 " stroke="#F9C969" stroke-linecap="round" stroke-linejoin="round"/> + + <linearGradient gradientTransform="matrix(1 0 0 1 0.3203 -3091.7656)" x1="26.3398" x2="27.5807" gradientUnits="userSpaceOnUse" y1="3115.7266" y2="3145.5239" id="svg2.SVGID_1_"> + <stop offset="0.2711" style="stop-color:#FFAB4F"/> + <stop offset="1" style="stop-color:#FFD28F"/> + </linearGradient> + <path fill="url(#svg2.SVGID_1_)" d="M49.529,51.225c-4.396-4.396-10.951-5.884-12.063-6.109 V37.8H19.278c0,0,0.038,6.903,0,6.868c0,0-6.874,0.997-12.308,6.432C1.378,56.691,0.5,62.77,0.5,62.77 c0,1.938,1.575,3.492,3.523,3.492h48.51c1.947,0,3.521-1.558,3.521-3.492C56.055,62.768,54.211,55.906,49.529,51.225z" stroke-miterlimit="10" stroke="#ED9135"/> + + <radialGradient gradientTransform="matrix(1 0 0 1 0.3203 -3091.7656)" gradientUnits="userSpaceOnUse" r="23.425" cx="27.5835" id="svg2.face_x5F_white_1_" cy="3117.4922" fx="23.0139" fy="3115.0024"> + <stop offset="0" style="stop-color:#FFD28F"/> + <stop offset="1" style="stop-color:#FFAB4F"/> + </radialGradient> + <path fill="url(#svg2.face_x5F_white_1_)" id="svg2.face_x5F_white_3_" d="M43.676,23.357 c0.086,10.2-6.738,18.52-15.25,18.586c-8.5,0.068-15.464-8.146-15.55-18.344C12.794,13.4,19.618,5.079,28.123,5.012 C36.627,4.945,43.59,13.158,43.676,23.357z" stroke-miterlimit="10" stroke="#ED9135"/> + + <linearGradient gradientTransform="matrix(0.275 0 0 -0.2733 -1752.8849 -3351.7349)" x1="6468.501" x2="6492.1304" gradientUnits="userSpaceOnUse" y1="-12291.5195" y2="-12384.9688" id="svg2.face_highlight_1_"> + <stop offset="0" style="stop-color:#FFFFFF;stop-opacity:0.24"/> + <stop offset="1" 
style="stop-color:#FFFFFF;stop-opacity:0.16"/> + </linearGradient> + <path fill="url(#svg2.face_highlight_1_)" id="svg2.face_highlight_3_" d="M28.415,5.625c-6.035,0.047-10.747,4.493-12.787,10.386 c-0.664,1.919-0.294,4.043,0.98,5.629c2.73,3.398,5.729,6.283,9.461,8.088c3.137,1.518,7.535,2.385,11.893,1.247 c2.274-0.592,3.988-2.459,4.375-4.766c0.187-1.094,0.293-2.289,0.283-3.553C42.54,13.244,36.729,5.56,28.415,5.625z"/> + <path fill="#FAE1AA" d="M17.754,45.004 c-0.413-3.02-0.771-6.182-0.946-9.033c-0.574-9.349,2.886-17.721,2.886-17.721l0.882-2.078l0.94,2.419l1.872-4.956l-0.049,4.553 l1.311-0.749l0.701-1.653L25.715,18l1.055-0.988l2.13-0.729L29.84,17l0.479-2.375l0.834,2.554l1.125-1.024l1.68-0.214l1.07,1.372 l-0.243-4.246l1.883,3.967L37.611,17c0,0,5.595,10.622,3.313,29.1c2.154,0.76,4.916,2.016,7.316,3.976 c0.001-0.001,0.011-0.013,0.011-0.013L44.111,19.38C43.079,12.873,38.752,2.599,28.73,2.648 C5.153,2.568,13.025,42.055,8.945,49.334c-0.007,0.014-0.012,0.025-0.02,0.039C12.292,46.729,15.809,45.516,17.754,45.004z" stroke="#E2B354" stroke-linecap="round" stroke-linejoin="round"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.04 64.1543)" id="svg2.collar_x5F_body_1_" r="32.4004" cx="15.2412" gradientUnits="userSpaceOnUse" cy="9.9829"> + <stop offset="0" style="stop-color:#FCB57A"/> + <stop offset="1" style="stop-color:#FF8C36"/> + </radialGradient> + <path fill="url(#svg2.collar_x5F_body_1_)" d="M0.5,62.768c0,1.938,1.575,3.494,3.523,3.494 h48.51c1.947,0,3.521-1.559,3.521-3.494c0,0-1.844-6.861-6.525-11.543c-4.815-4.813-11.244-6.146-11.244-6.146 c-1.771,1.655-5.61,2.802-10.063,2.802c-4.453,0-8.292-1.146-10.063-2.802c0,0-5.755,0.586-11.189,6.021 C1.378,56.689,0.5,62.768,0.5,62.768z" id="svg2.collar_x5F_body_2_" stroke="#E55E03"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.04 64.1543)" id="svg2.collar_x5F_r_1_" r="9.2823" cx="31.5801" gradientUnits="userSpaceOnUse" cy="15.957"> + <stop offset="0" style="stop-color:#FCB57A"/> + <stop offset="1" 
style="stop-color:#FF8C36"/> + </radialGradient> + <path fill="url(#svg2.collar_x5F_r_1_)" d="M38.159,41.381c0,0-0.574,2.369-3.013,4.441 c-2.108,1.795-5.783,2.072-5.783,2.072l3.974,6.217c0,0,2.957-1.637,5.009-3.848c1.922-2.072,1.37-5.479,1.37-5.479L38.159,41.381z " id="svg2.collar_x5F_r_2_" stroke="#E55E03"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.04 64.1543)" id="svg2.collar_x5F_l_1_" r="9.2843" cx="19.2178" gradientUnits="userSpaceOnUse" cy="15.916"> + <stop offset="0" style="stop-color:#FCB57A"/> + <stop offset="1" style="stop-color:#FF8C36"/> + </radialGradient> + <path fill="url(#svg2.collar_x5F_l_1_)" d="M18.63,41.422c0,0,0.576,2.369,3.012,4.441 c2.109,1.793,5.785,2.072,5.785,2.072l-3.974,6.217c0,0-2.957-1.637-5.007-3.85c-1.922-2.072-1.37-5.48-1.37-5.48L18.63,41.422z" id="svg2.collar_x5F_l_2_" stroke="#E55E03"/> + <circle fill="#E55E03" r="0.584" id="svg2.Knob2_2_" cx="28.258" cy="56.254" stroke="#E55E03"/> + <circle fill="#E55E03" r="0.584" id="svg2.Knob1_2_" cx="28.297" cy="62.499" stroke="#E55E03"/> +</g> +</svg> + </g> + </g> + <g text-rendering="geometricPrecision" stroke-miterlimit="1.45" shape-rendering="geometricPrecision" font-family="sans-serif" transform="matrix(1,0,0,1,-351,-156)" stroke-linecap="butt"> + <text x="587.3934" xml:space="preserve" y="793.124" clip-path="url(#clipPath2)" stroke="none">User B</text> + </g> + <g fill="rgb(255,204,0)" text-rendering="geometricPrecision" shape-rendering="geometricPrecision" transform="matrix(1,0,0,1,-351,-156)" stroke="rgb(255,204,0)"> + <ellipse rx="137" ry="32.3196" clip-path="url(#clipPath2)" cx="528" cy="550.3196" stroke="none"/> + </g> + <g text-rendering="geometricPrecision" stroke-miterlimit="1.45" shape-rendering="geometricPrecision" transform="matrix(1,0,0,1,-351,-156)" stroke-linecap="butt"> + <ellipse rx="137" fill="none" ry="32.3196" clip-path="url(#clipPath2)" cx="528" cy="550.3196"/> + <text x="495.6504" xml:space="preserve" y="547.4896" clip-path="url(#clipPath2)" 
font-family="sans-serif" stroke="none">CephProxy</text> + <text x="487.1279" xml:space="preserve" y="561.4583" clip-path="url(#clipPath2)" font-family="sans-serif" stroke="none">UI + Backend</text> + </g> + <g fill="rgb(255,204,0)" text-rendering="geometricPrecision" shape-rendering="geometricPrecision" transform="matrix(1,0,0,1,-351,-156)" stroke="rgb(255,204,0)"> + <rect x="366" width="56.5541" height="52" y="389.3196" clip-path="url(#clipPath2)" stroke="none"/> + </g> + <g text-rendering="geometricPrecision" stroke-miterlimit="1.45" shape-rendering="geometricPrecision" transform="matrix(1,0,0,1,-351,-156)" stroke-linecap="butt"> + <rect fill="none" x="366" width="56.5541" height="52" y="389.3196" clip-path="url(#clipPath2)"/> + <text x="384.2985" xml:space="preserve" y="419.4739" clip-path="url(#clipPath2)" font-family="sans-serif" stroke="none">AAI</text> + <path fill="none" d="M478.1254 734.2412 L457.6377 730.6742 L443.6825 727.4989 L432.4453 723.9235 L423.9767 719.6747 L418.3271 714.4792 L415.5472 708.0637 L415.6875 700.1547 L418.7985 690.479 L424.9307 678.7632 L446.4609 648.1179 L480.6826 606.0323 L495.9049 588.1093" clip-path="url(#clipPath2)"/> + <path d="M501.0836 582.0117 L489.5045 587.9213 L495.2575 588.8715 L497.1265 594.3948 Z" clip-path="url(#clipPath2)" stroke="none"/> + <path fill="none" d="M544.2219 589.8914 L589.9002 701.319" clip-path="url(#clipPath2)"/> + <path d="M541.1875 582.4892 L541.1127 595.489 L544.6012 590.8167 L550.3655 591.696 Z" clip-path="url(#clipPath2)" stroke="none"/> + <path d="M592.9346 708.7211 L593.0093 695.7214 L589.5209 700.3937 L583.7567 699.5144 Z" clip-path="url(#clipPath2)" stroke="none"/> + <path fill="none" d="M420.0311 441.3196 L491.196 513.1642" clip-path="url(#clipPath2)"/> + <path d="M496.8259 518.8478 L491.9333 506.8036 L490.4923 512.4537 L484.8288 513.8411 Z" clip-path="url(#clipPath2)" stroke="none"/> + </g> + <g fill="rgb(245,245,245)" text-rendering="geometricPrecision" shape-rendering="geometricPrecision" 
transform="matrix(1,0,0,1,-351,-156)" stroke="rgb(245,245,245)"> + <rect x="536.4736" y="171.5391" clip-path="url(#clipPath2)" width="483.6896" rx="4" ry="4" height="224.3984" stroke="none"/> + <rect x="536.4736" y="171.5391" clip-path="url(#clipPath2)" fill="rgb(235,235,235)" width="483.6896" height="21.4609" stroke="none"/> + </g> + <g font-size="15px" stroke-linecap="butt" transform="matrix(1,0,0,1,-351,-156)" text-rendering="geometricPrecision" font-family="sans-serif" shape-rendering="geometricPrecision" stroke-miterlimit="1.45"> + <text x="774.9919" xml:space="preserve" y="187.4624" clip-path="url(#clipPath2)" stroke="none">Object Storage (S3) - Ceph RGW</text> + <rect x="536.4736" y="171.5391" clip-path="url(#clipPath2)" fill="none" width="483.6896" stroke-dasharray="6,2" rx="4" ry="4" height="224.3984"/> + </g> + <g text-rendering="geometricPrecision" shape-rendering="geometricPrecision" transform="matrix(1,0,0,1,228.8885,58.669)"> + <g clip-path="url(#clipPath5)"> + <svg xml:space="preserve" opacity="1" writing-mode="lr-tb" stop-color="rgb(0, 0, 0)" shape-rendering="auto" glyph-orientation-horizontal="0deg" color-profile="auto" lighting-color="rgb(255, 255, 255)" color="rgb(0, 0, 0)" font-weight="400" alignment-baseline="auto" font-style="normal" version="1.1" color-interpolation-filters="linearrgb" text-anchor="start" stroke-linecap="butt" color-interpolation="srgb" font-variant="normal" word-spacing="normal" fill-opacity="1" text-rendering="auto" clip-path="none" text-decoration="none" letter-spacing="normal" viewBox="-0.875 -0.887 41 48" glyph-orientation-vertical="auto" display="inline" font-size-adjust="none" overflow="hidden" fill="rgb(0, 0, 0)" font-stretch="normal" stroke-dasharray="none" stroke-miterlimit="4" stop-opacity="1" color-rendering="auto" font-size="12" pointer-events="visiblepainted" mask="none" direction="ltr" baseline-shift="baseline" enable-background="new -0.875 -0.887 41 48" fill-rule="nonzero" image-rendering="auto" 
stroke-dashoffset="0" width="41px" marker-end="none" clip="auto" cursor="auto" stroke="none" filter="none" visibility="visible" kerning="auto" stroke-width="1" font-family=""Arial","Helvetica",sans-serif" flood-opacity="1" clip-rule="nonzero" src="none" height="48px" unicode-bidi="normal" stroke-linejoin="miter" stroke-opacity="1" flood-color="rgb(0, 0, 0)" dominant-baseline="auto" marker-start="none" x="0px" marker-mid="none" y="0px"> +<defs> +<clipPath clipPathUnits="userSpaceOnUse" id="clipPath1"><path d="M0 0 L685 0 L685 658 L0 658 L0 0 Z"/></clipPath><clipPath clipPathUnits="userSpaceOnUse" id="clipPath2"><path d="M351 156 L1036 156 L1036 814 L351 814 L351 156 Z"/></clipPath><clipPath clipPathUnits="userSpaceOnUse" id="clipPath3"><path d="M-114.4495 -531.5206 L571.924 -531.5206 L571.924 108.5961 L-114.4495 108.5961 L-114.4495 -531.5206 Z"/></clipPath><clipPath clipPathUnits="userSpaceOnUse" id="clipPath4"><path d="M-225.475 -531.8928 L460.2467 -531.8928 L460.2467 108.5154 L-225.475 108.5154 L-225.475 -531.8928 Z"/></clipPath><clipPath clipPathUnits="userSpaceOnUse" id="clipPath5"><path d="M-228.8885 -58.669 L456.1115 -58.669 L456.1115 599.331 L-228.8885 599.331 L-228.8885 -58.669 Z"/></clipPath><clipPath clipPathUnits="userSpaceOnUse" id="clipPath6"><path d="M-354.125 -58.669 L330.875 -58.669 L330.875 599.331 L-354.125 599.331 L-354.125 -58.669 Z"/></clipPath><clipPath clipPathUnits="userSpaceOnUse" id="clipPath7"><path d="M-399.1352 -551.7792 L286.5266 -551.7792 L286.5266 106.2208 L-399.1352 106.2208 L-399.1352 -551.7792 Z"/></clipPath><clipPath clipPathUnits="userSpaceOnUse" id="clipPath8"><path d="M-108.4999 -405.0899 L496.3192 -83.5019 L187.4069 497.4776 L-417.4122 175.8896 L-108.4999 -405.0899 Z"/></clipPath></defs> +<linearGradient gradientTransform="matrix(1 0 0 -1 -642.8008 -939.4756)" x1="642.8008" x2="682.0508" gradientUnits="userSpaceOnUse" y1="-979.1445" y2="-979.1445" id="svg3.SVGID_1_"> + <stop offset="0" style="stop-color:#3C89C9"/> + <stop 
offset="0.1482" style="stop-color:#60A6DD"/> + <stop offset="0.3113" style="stop-color:#81C1F0"/> + <stop offset="0.4476" style="stop-color:#95D1FB"/> + <stop offset="0.5394" style="stop-color:#9CD7FF"/> + <stop offset="0.636" style="stop-color:#98D4FD"/> + <stop offset="0.7293" style="stop-color:#8DCAF6"/> + <stop offset="0.8214" style="stop-color:#79BBEB"/> + <stop offset="0.912" style="stop-color:#5EA5DC"/> + <stop offset="1" style="stop-color:#3C89C9"/> +</linearGradient> +<path fill="url(#svg3.SVGID_1_)" d="M19.625,36.763C8.787,36.763,0,34.888,0,32.575v10c0,2.313,8.787,4.188,19.625,4.188 c10.839,0,19.625-1.875,19.625-4.188v-10C39.25,34.888,30.464,36.763,19.625,36.763z"/> +<linearGradient gradientTransform="matrix(1 0 0 -1 -642.8008 -939.4756)" x1="642.8008" x2="682.0508" gradientUnits="userSpaceOnUse" y1="-973.1445" y2="-973.1445" id="svg3.SVGID_2_"> + <stop offset="0" style="stop-color:#9CD7FF"/> + <stop offset="0.0039" style="stop-color:#9DD7FF"/> + <stop offset="0.2273" style="stop-color:#BDE5FF"/> + <stop offset="0.4138" style="stop-color:#D1EEFF"/> + <stop offset="0.5394" style="stop-color:#D9F1FF"/> + <stop offset="0.6155" style="stop-color:#D5EFFE"/> + <stop offset="0.6891" style="stop-color:#C9E7FA"/> + <stop offset="0.7617" style="stop-color:#B6DAF3"/> + <stop offset="0.8337" style="stop-color:#9AC8EA"/> + <stop offset="0.9052" style="stop-color:#77B0DD"/> + <stop offset="0.9754" style="stop-color:#4D94CF"/> + <stop offset="1" style="stop-color:#3C89C9"/> +</linearGradient> +<path fill="url(#svg3.SVGID_2_)" d="M19.625,36.763c10.839,0,19.625-1.875,19.625-4.188l-1.229-2c0,2.168-8.235,3.927-18.396,3.927 c-9.481,0-17.396-1.959-18.396-3.927l-1.229,2C0,34.888,8.787,36.763,19.625,36.763z"/> +<path fill="#3C89C9" d="M19.625,26.468c10.16,0,19.625,2.775,19.625,2.775c-0.375,2.721-5.367,5.438-19.554,5.438 c-12.125,0-18.467-2.484-19.541-4.918C-0.127,29.125,9.465,26.468,19.625,26.468z"/> +<linearGradient gradientTransform="matrix(1 0 0 -1 -642.8008 -939.4756)" 
x1="642.8008" x2="682.0508" gradientUnits="userSpaceOnUse" y1="-965.6948" y2="-965.6948" id="svg3.SVGID_3_"> + <stop offset="0" style="stop-color:#3C89C9"/> + <stop offset="0.1482" style="stop-color:#60A6DD"/> + <stop offset="0.3113" style="stop-color:#81C1F0"/> + <stop offset="0.4476" style="stop-color:#95D1FB"/> + <stop offset="0.5394" style="stop-color:#9CD7FF"/> + <stop offset="0.636" style="stop-color:#98D4FD"/> + <stop offset="0.7293" style="stop-color:#8DCAF6"/> + <stop offset="0.8214" style="stop-color:#79BBEB"/> + <stop offset="0.912" style="stop-color:#5EA5DC"/> + <stop offset="1" style="stop-color:#3C89C9"/> +</linearGradient> +<path fill="url(#svg3.SVGID_3_)" d="M19.625,23.313C8.787,23.313,0,21.438,0,19.125v10c0,2.313,8.787,4.188,19.625,4.188 c10.839,0,19.625-1.875,19.625-4.188v-10C39.25,21.438,30.464,23.313,19.625,23.313z"/> +<linearGradient gradientTransform="matrix(1 0 0 -1 -642.8008 -939.4756)" x1="642.8008" x2="682.0508" gradientUnits="userSpaceOnUse" y1="-959.6948" y2="-959.6948" id="svg3.SVGID_4_"> + <stop offset="0" style="stop-color:#9CD7FF"/> + <stop offset="0.0039" style="stop-color:#9DD7FF"/> + <stop offset="0.2273" style="stop-color:#BDE5FF"/> + <stop offset="0.4138" style="stop-color:#D1EEFF"/> + <stop offset="0.5394" style="stop-color:#D9F1FF"/> + <stop offset="0.6155" style="stop-color:#D5EFFE"/> + <stop offset="0.6891" style="stop-color:#C9E7FA"/> + <stop offset="0.7617" style="stop-color:#B6DAF3"/> + <stop offset="0.8337" style="stop-color:#9AC8EA"/> + <stop offset="0.9052" style="stop-color:#77B0DD"/> + <stop offset="0.9754" style="stop-color:#4D94CF"/> + <stop offset="1" style="stop-color:#3C89C9"/> +</linearGradient> +<path fill="url(#svg3.SVGID_4_)" d="M19.625,23.313c10.839,0,19.625-1.875,19.625-4.188l-1.229-2c0,2.168-8.235,3.926-18.396,3.926 c-9.481,0-17.396-1.959-18.396-3.926l-1.229,2C0,21.438,8.787,23.313,19.625,23.313z"/> +<path fill="#3C89C9" 
d="M19.476,13.019c10.161,0,19.625,2.775,19.625,2.775c-0.375,2.721-5.367,5.438-19.555,5.438 c-12.125,0-18.467-2.485-19.541-4.918C-0.277,15.674,9.316,13.019,19.476,13.019z"/> +<linearGradient gradientTransform="matrix(1 0 0 -1 -642.8008 -939.4756)" x1="642.8008" x2="682.0508" gradientUnits="userSpaceOnUse" y1="-952.4946" y2="-952.4946" id="svg3.SVGID_5_"> + <stop offset="0" style="stop-color:#3C89C9"/> + <stop offset="0.1482" style="stop-color:#60A6DD"/> + <stop offset="0.3113" style="stop-color:#81C1F0"/> + <stop offset="0.4476" style="stop-color:#95D1FB"/> + <stop offset="0.5394" style="stop-color:#9CD7FF"/> + <stop offset="0.636" style="stop-color:#98D4FD"/> + <stop offset="0.7293" style="stop-color:#8DCAF6"/> + <stop offset="0.8214" style="stop-color:#79BBEB"/> + <stop offset="0.912" style="stop-color:#5EA5DC"/> + <stop offset="1" style="stop-color:#3C89C9"/> +</linearGradient> +<path fill="url(#svg3.SVGID_5_)" d="M19.625,10.113C8.787,10.113,0,8.238,0,5.925v10c0,2.313,8.787,4.188,19.625,4.188 c10.839,0,19.625-1.875,19.625-4.188v-10C39.25,8.238,30.464,10.113,19.625,10.113z"/> +<linearGradient gradientTransform="matrix(1 0 0 -1 -642.8008 -939.4756)" x1="642.8008" x2="682.0508" gradientUnits="userSpaceOnUse" y1="-946.4946" y2="-946.4946" id="svg3.SVGID_6_"> + <stop offset="0" style="stop-color:#9CD7FF"/> + <stop offset="0.0039" style="stop-color:#9DD7FF"/> + <stop offset="0.2273" style="stop-color:#BDE5FF"/> + <stop offset="0.4138" style="stop-color:#D1EEFF"/> + <stop offset="0.5394" style="stop-color:#D9F1FF"/> + <stop offset="0.6155" style="stop-color:#D5EFFE"/> + <stop offset="0.6891" style="stop-color:#C9E7FA"/> + <stop offset="0.7617" style="stop-color:#B6DAF3"/> + <stop offset="0.8337" style="stop-color:#9AC8EA"/> + <stop offset="0.9052" style="stop-color:#77B0DD"/> + <stop offset="0.9754" style="stop-color:#4D94CF"/> + <stop offset="1" style="stop-color:#3C89C9"/> +</linearGradient> +<path fill="url(#svg3.SVGID_6_)" 
d="M19.625,10.113c10.839,0,19.625-1.875,19.625-4.188l-1.229-2c0,2.168-8.235,3.926-18.396,3.926 c-9.481,0-17.396-1.959-18.396-3.926L0,5.925C0,8.238,8.787,10.113,19.625,10.113z"/> +<linearGradient gradientTransform="matrix(1 0 0 -1 -642.8008 -939.4756)" x1="644.0293" x2="680.8223" gradientUnits="userSpaceOnUse" y1="-943.4014" y2="-943.4014" id="svg3.SVGID_7_"> + <stop offset="0" style="stop-color:#9CD7FF"/> + <stop offset="1" style="stop-color:#3C89C9"/> +</linearGradient> +<ellipse rx="18.396" fill="url(#svg3.SVGID_7_)" ry="3.926" cx="19.625" cy="3.926"/> +<path fill="#FFFFFF" d="M31.04,45.982c0,0-4.354,0.664-7.29,0.781 c-3.125,0.125-8.952,0-8.952,0l-2.384-10.292l0.044-2.108l-1.251-1.154L9.789,23.024l-0.082-0.119L9.5,20.529l-1.65-1.254 L5.329,8.793c0,0,4.213,0.903,7.234,1.07s8.375,0.25,8.375,0.25l3,9.875l-0.25,1.313l1.063,2.168l2.312,9.645l-0.521,1.416 l1.46,1.834L31.04,45.982z" enable-background="new " opacity="0.24"/> +</svg> + </g> + </g> + <g text-rendering="geometricPrecision" stroke-miterlimit="1.45" shape-rendering="geometricPrecision" font-family="sans-serif" transform="matrix(1,0,0,1,-351,-156)" stroke-linecap="butt"> + <text x="553.4736" xml:space="preserve" y="286.1543" clip-path="url(#clipPath2)" stroke="none">Owner: CephProxy </text> + <text x="553.4736" xml:space="preserve" y="300.123" clip-path="url(#clipPath2)" stroke="none">User: A</text> + <text x="579.5137" xml:space="preserve" y="248.1543" clip-path="url(#clipPath2)" stroke="none">Bucket</text> + </g> + <g text-rendering="geometricPrecision" shape-rendering="geometricPrecision" transform="matrix(1,0,0,1,354.125,58.669)"> + <g clip-path="url(#clipPath6)"> + <svg xml:space="preserve" opacity="1" writing-mode="lr-tb" stop-color="rgb(0, 0, 0)" shape-rendering="auto" glyph-orientation-horizontal="0deg" color-profile="auto" lighting-color="rgb(255, 255, 255)" color="rgb(0, 0, 0)" font-weight="400" alignment-baseline="auto" font-style="normal" version="1.1" color-interpolation-filters="linearrgb" 
text-anchor="start" stroke-linecap="butt" color-interpolation="srgb" font-variant="normal" word-spacing="normal" fill-opacity="1" text-rendering="auto" clip-path="none" text-decoration="none" letter-spacing="normal" viewBox="-0.875 -0.887 41 48" glyph-orientation-vertical="auto" display="inline" font-size-adjust="none" overflow="hidden" fill="rgb(0, 0, 0)" font-stretch="normal" stroke-dasharray="none" stroke-miterlimit="4" stop-opacity="1" color-rendering="auto" font-size="12" pointer-events="visiblepainted" mask="none" direction="ltr" baseline-shift="baseline" enable-background="new -0.875 -0.887 41 48" fill-rule="nonzero" image-rendering="auto" stroke-dashoffset="0" width="41px" marker-end="none" clip="auto" cursor="auto" stroke="none" filter="none" visibility="visible" kerning="auto" stroke-width="1" font-family=""Arial","Helvetica",sans-serif" flood-opacity="1" clip-rule="nonzero" src="none" height="48px" unicode-bidi="normal" stroke-linejoin="miter" stroke-opacity="1" flood-color="rgb(0, 0, 0)" dominant-baseline="auto" marker-start="none" x="0px" marker-mid="none" y="0px"> +<defs> +</defs> +<linearGradient gradientTransform="matrix(1 0 0 -1 -642.8008 -939.4756)" x1="642.8008" x2="682.0508" gradientUnits="userSpaceOnUse" y1="-979.1445" y2="-979.1445" id="svg4.SVGID_1_"> + <stop offset="0" style="stop-color:#3C89C9"/> + <stop offset="0.1482" style="stop-color:#60A6DD"/> + <stop offset="0.3113" style="stop-color:#81C1F0"/> + <stop offset="0.4476" style="stop-color:#95D1FB"/> + <stop offset="0.5394" style="stop-color:#9CD7FF"/> + <stop offset="0.636" style="stop-color:#98D4FD"/> + <stop offset="0.7293" style="stop-color:#8DCAF6"/> + <stop offset="0.8214" style="stop-color:#79BBEB"/> + <stop offset="0.912" style="stop-color:#5EA5DC"/> + <stop offset="1" style="stop-color:#3C89C9"/> +</linearGradient> +<path fill="url(#svg4.SVGID_1_)" d="M19.625,36.763C8.787,36.763,0,34.888,0,32.575v10c0,2.313,8.787,4.188,19.625,4.188 
c10.839,0,19.625-1.875,19.625-4.188v-10C39.25,34.888,30.464,36.763,19.625,36.763z"/> +<linearGradient gradientTransform="matrix(1 0 0 -1 -642.8008 -939.4756)" x1="642.8008" x2="682.0508" gradientUnits="userSpaceOnUse" y1="-973.1445" y2="-973.1445" id="svg4.SVGID_2_"> + <stop offset="0" style="stop-color:#9CD7FF"/> + <stop offset="0.0039" style="stop-color:#9DD7FF"/> + <stop offset="0.2273" style="stop-color:#BDE5FF"/> + <stop offset="0.4138" style="stop-color:#D1EEFF"/> + <stop offset="0.5394" style="stop-color:#D9F1FF"/> + <stop offset="0.6155" style="stop-color:#D5EFFE"/> + <stop offset="0.6891" style="stop-color:#C9E7FA"/> + <stop offset="0.7617" style="stop-color:#B6DAF3"/> + <stop offset="0.8337" style="stop-color:#9AC8EA"/> + <stop offset="0.9052" style="stop-color:#77B0DD"/> + <stop offset="0.9754" style="stop-color:#4D94CF"/> + <stop offset="1" style="stop-color:#3C89C9"/> +</linearGradient> +<path fill="url(#svg4.SVGID_2_)" d="M19.625,36.763c10.839,0,19.625-1.875,19.625-4.188l-1.229-2c0,2.168-8.235,3.927-18.396,3.927 c-9.481,0-17.396-1.959-18.396-3.927l-1.229,2C0,34.888,8.787,36.763,19.625,36.763z"/> +<path fill="#3C89C9" d="M19.625,26.468c10.16,0,19.625,2.775,19.625,2.775c-0.375,2.721-5.367,5.438-19.554,5.438 c-12.125,0-18.467-2.484-19.541-4.918C-0.127,29.125,9.465,26.468,19.625,26.468z"/> +<linearGradient gradientTransform="matrix(1 0 0 -1 -642.8008 -939.4756)" x1="642.8008" x2="682.0508" gradientUnits="userSpaceOnUse" y1="-965.6948" y2="-965.6948" id="svg4.SVGID_3_"> + <stop offset="0" style="stop-color:#3C89C9"/> + <stop offset="0.1482" style="stop-color:#60A6DD"/> + <stop offset="0.3113" style="stop-color:#81C1F0"/> + <stop offset="0.4476" style="stop-color:#95D1FB"/> + <stop offset="0.5394" style="stop-color:#9CD7FF"/> + <stop offset="0.636" style="stop-color:#98D4FD"/> + <stop offset="0.7293" style="stop-color:#8DCAF6"/> + <stop offset="0.8214" style="stop-color:#79BBEB"/> + <stop offset="0.912" style="stop-color:#5EA5DC"/> + <stop offset="1" 
style="stop-color:#3C89C9"/> +</linearGradient> +<path fill="url(#svg4.SVGID_3_)" d="M19.625,23.313C8.787,23.313,0,21.438,0,19.125v10c0,2.313,8.787,4.188,19.625,4.188 c10.839,0,19.625-1.875,19.625-4.188v-10C39.25,21.438,30.464,23.313,19.625,23.313z"/> +<linearGradient gradientTransform="matrix(1 0 0 -1 -642.8008 -939.4756)" x1="642.8008" x2="682.0508" gradientUnits="userSpaceOnUse" y1="-959.6948" y2="-959.6948" id="svg4.SVGID_4_"> + <stop offset="0" style="stop-color:#9CD7FF"/> + <stop offset="0.0039" style="stop-color:#9DD7FF"/> + <stop offset="0.2273" style="stop-color:#BDE5FF"/> + <stop offset="0.4138" style="stop-color:#D1EEFF"/> + <stop offset="0.5394" style="stop-color:#D9F1FF"/> + <stop offset="0.6155" style="stop-color:#D5EFFE"/> + <stop offset="0.6891" style="stop-color:#C9E7FA"/> + <stop offset="0.7617" style="stop-color:#B6DAF3"/> + <stop offset="0.8337" style="stop-color:#9AC8EA"/> + <stop offset="0.9052" style="stop-color:#77B0DD"/> + <stop offset="0.9754" style="stop-color:#4D94CF"/> + <stop offset="1" style="stop-color:#3C89C9"/> +</linearGradient> +<path fill="url(#svg4.SVGID_4_)" d="M19.625,23.313c10.839,0,19.625-1.875,19.625-4.188l-1.229-2c0,2.168-8.235,3.926-18.396,3.926 c-9.481,0-17.396-1.959-18.396-3.926l-1.229,2C0,21.438,8.787,23.313,19.625,23.313z"/> +<path fill="#3C89C9" d="M19.476,13.019c10.161,0,19.625,2.775,19.625,2.775c-0.375,2.721-5.367,5.438-19.555,5.438 c-12.125,0-18.467-2.485-19.541-4.918C-0.277,15.674,9.316,13.019,19.476,13.019z"/> +<linearGradient gradientTransform="matrix(1 0 0 -1 -642.8008 -939.4756)" x1="642.8008" x2="682.0508" gradientUnits="userSpaceOnUse" y1="-952.4946" y2="-952.4946" id="svg4.SVGID_5_"> + <stop offset="0" style="stop-color:#3C89C9"/> + <stop offset="0.1482" style="stop-color:#60A6DD"/> + <stop offset="0.3113" style="stop-color:#81C1F0"/> + <stop offset="0.4476" style="stop-color:#95D1FB"/> + <stop offset="0.5394" style="stop-color:#9CD7FF"/> + <stop offset="0.636" style="stop-color:#98D4FD"/> + <stop 
offset="0.7293" style="stop-color:#8DCAF6"/> + <stop offset="0.8214" style="stop-color:#79BBEB"/> + <stop offset="0.912" style="stop-color:#5EA5DC"/> + <stop offset="1" style="stop-color:#3C89C9"/> +</linearGradient> +<path fill="url(#svg4.SVGID_5_)" d="M19.625,10.113C8.787,10.113,0,8.238,0,5.925v10c0,2.313,8.787,4.188,19.625,4.188 c10.839,0,19.625-1.875,19.625-4.188v-10C39.25,8.238,30.464,10.113,19.625,10.113z"/> +<linearGradient gradientTransform="matrix(1 0 0 -1 -642.8008 -939.4756)" x1="642.8008" x2="682.0508" gradientUnits="userSpaceOnUse" y1="-946.4946" y2="-946.4946" id="svg4.SVGID_6_"> + <stop offset="0" style="stop-color:#9CD7FF"/> + <stop offset="0.0039" style="stop-color:#9DD7FF"/> + <stop offset="0.2273" style="stop-color:#BDE5FF"/> + <stop offset="0.4138" style="stop-color:#D1EEFF"/> + <stop offset="0.5394" style="stop-color:#D9F1FF"/> + <stop offset="0.6155" style="stop-color:#D5EFFE"/> + <stop offset="0.6891" style="stop-color:#C9E7FA"/> + <stop offset="0.7617" style="stop-color:#B6DAF3"/> + <stop offset="0.8337" style="stop-color:#9AC8EA"/> + <stop offset="0.9052" style="stop-color:#77B0DD"/> + <stop offset="0.9754" style="stop-color:#4D94CF"/> + <stop offset="1" style="stop-color:#3C89C9"/> +</linearGradient> +<path fill="url(#svg4.SVGID_6_)" d="M19.625,10.113c10.839,0,19.625-1.875,19.625-4.188l-1.229-2c0,2.168-8.235,3.926-18.396,3.926 c-9.481,0-17.396-1.959-18.396-3.926L0,5.925C0,8.238,8.787,10.113,19.625,10.113z"/> +<linearGradient gradientTransform="matrix(1 0 0 -1 -642.8008 -939.4756)" x1="644.0293" x2="680.8223" gradientUnits="userSpaceOnUse" y1="-943.4014" y2="-943.4014" id="svg4.SVGID_7_"> + <stop offset="0" style="stop-color:#9CD7FF"/> + <stop offset="1" style="stop-color:#3C89C9"/> +</linearGradient> +<ellipse rx="18.396" fill="url(#svg4.SVGID_7_)" ry="3.926" cx="19.625" cy="3.926"/> +<path fill="#FFFFFF" d="M31.04,45.982c0,0-4.354,0.664-7.29,0.781 
c-3.125,0.125-8.952,0-8.952,0l-2.384-10.292l0.044-2.108l-1.251-1.154L9.789,23.024l-0.082-0.119L9.5,20.529l-1.65-1.254 L5.329,8.793c0,0,4.213,0.903,7.234,1.07s8.375,0.25,8.375,0.25l3,9.875l-0.25,1.313l1.063,2.168l2.312,9.645l-0.521,1.416 l1.46,1.834L31.04,45.982z" enable-background="new " opacity="0.24"/> +</svg> + </g> + </g> + <g text-rendering="geometricPrecision" stroke-miterlimit="1.45" shape-rendering="geometricPrecision" font-family="sans-serif" transform="matrix(1,0,0,1,-351,-156)" stroke-linecap="butt"> + <text x="680.6174" xml:space="preserve" y="286.1543" clip-path="url(#clipPath2)" stroke="none">Owner: CephProxy</text> + <text x="680.6174" xml:space="preserve" y="300.123" clip-path="url(#clipPath2)" stroke="none">User: B</text> + <text x="680.6174" xml:space="preserve" y="314.0918" clip-path="url(#clipPath2)" stroke="none">Core Facility</text> + <text x="704.7502" xml:space="preserve" y="248.1543" clip-path="url(#clipPath2)" stroke="none">Bucket</text> + </g> + <g fill="rgb(255,204,0)" text-rendering="geometricPrecision" shape-rendering="geometricPrecision" transform="matrix(1,0,0,1,-351,-156)" stroke="rgb(255,204,0)"> + <rect x="779" width="211.25" height="30" y="342.75" clip-path="url(#clipPath2)" stroke="none"/> + </g> + <g text-rendering="geometricPrecision" stroke-miterlimit="1.45" shape-rendering="geometricPrecision" transform="matrix(1,0,0,1,-351,-156)" stroke-linecap="butt"> + <rect fill="none" x="779" width="211.25" height="30" y="342.75" clip-path="url(#clipPath2)"/> + <text x="875.1328" xml:space="preserve" y="361.9043" clip-path="url(#clipPath2)" font-family="sans-serif" stroke="none">API</text> + </g> + <g text-rendering="geometricPrecision" shape-rendering="geometricPrecision" transform="matrix(0.999,0,0,1,398.75,551.7792)"> + <g clip-path="url(#clipPath7)"> + <svg xml:space="preserve" opacity="1" writing-mode="lr-tb" stop-color="rgb(0, 0, 0)" shape-rendering="auto" glyph-orientation-horizontal="0deg" color-profile="auto" 
lighting-color="rgb(255, 255, 255)" color="rgb(0, 0, 0)" font-weight="400" alignment-baseline="auto" font-style="normal" version="1.1" color-interpolation-filters="linearrgb" text-anchor="start" stroke-linecap="butt" color-interpolation="srgb" font-variant="normal" word-spacing="normal" fill-opacity="1" text-rendering="auto" clip-path="none" text-decoration="none" letter-spacing="normal" viewBox="0 0 59 59" glyph-orientation-vertical="auto" display="inline" font-size-adjust="none" overflow="hidden" fill="rgb(0, 0, 0)" font-stretch="normal" stroke-dasharray="none" id="svg5.Ebene_1" stroke-miterlimit="4" stop-opacity="1" color-rendering="auto" font-size="12" pointer-events="visiblepainted" mask="none" direction="ltr" baseline-shift="baseline" enable-background="new 0 0 59 59" fill-rule="nonzero" image-rendering="auto" stroke-dashoffset="0" width="59px" marker-end="none" clip="auto" cursor="auto" stroke="none" filter="none" visibility="visible" kerning="auto" stroke-width="1" font-family=""Arial","Helvetica",sans-serif" flood-opacity="1" clip-rule="nonzero" src="none" height="59px" unicode-bidi="normal" stroke-linejoin="miter" stroke-opacity="1" flood-color="rgb(0, 0, 0)" dominant-baseline="auto" marker-start="none" x="0px" marker-mid="none" y="0px"> +<g> + + <linearGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" x1="13.6323" x2="14.3617" gradientUnits="userSpaceOnUse" y1="-1502.7422" y2="-1520.2559" id="svg5.neck_x5F_white_1_"> + <stop offset="0.2711" style="stop-color:#FFAB4F"/> + <stop offset="1" style="stop-color:#FFD28F"/> + </linearGradient> + <path fill="url(#svg5.neck_x5F_white_1_)" id="svg5.neck_x5F_white_19_" stroke-width="0.5" d=" M9.639,18.899h9.093v10.115c-0.656,0.186-8.586,0.218-9.093,0.086V18.899z" stroke-miterlimit="10" stroke="#FFAB4F"/> + + <linearGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" x1="13.1294" x2="13.75" gradientUnits="userSpaceOnUse" y1="-1499.207" y2="-1514.1072" id="svg5.SVGID_1_"> + <stop 
offset="0.2711" style="stop-color:#FFAB4F"/> + <stop offset="1" style="stop-color:#FFD28F"/> + </linearGradient> + <path fill="url(#svg5.SVGID_1_)" stroke-width="0.5" d="M24.766,25.612 c-2.199-2.197-5.477-2.941-6.033-3.055v-3.658H9.639c0,0,0.019,3.452,0,3.435c0,0-3.437,0.499-6.154,3.216 c-2.796,2.796-3.235,5.835-3.235,5.835c0,0.971,0.788,1.746,1.762,1.746h24.255c0.974,0,1.761-0.777,1.761-1.746 C28.027,31.384,27.105,27.953,24.766,25.612z" stroke-miterlimit="10" stroke="#ED9135"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" gradientUnits="userSpaceOnUse" r="11.7123" cx="13.7515" id="svg5.face_x5F_white_1_" cy="-1500.0908" fx="11.4666" fy="-1501.3357"> + <stop offset="0" style="stop-color:#FFD28F"/> + <stop offset="1" style="stop-color:#FFAB4F"/> + </radialGradient> + <path fill="url(#svg5.face_x5F_white_1_)" id="svg5.face_x5F_white_19_" stroke-width="0.5" d=" M21.838,11.679c0.043,5.1-3.369,9.26-7.623,9.293C9.964,21.006,6.483,16.899,6.44,11.8c-0.042-5.1,3.37-9.261,7.622-9.294 C18.314,2.473,21.795,6.579,21.838,11.679z" stroke-miterlimit="10" stroke="#ED9135"/> + + <linearGradient gradientTransform="matrix(0.275 0 0 0.2733 -1169.7629 -2324.9595)" x1="4300.0391" x2="4311.8545" gradientUnits="userSpaceOnUse" y1="8522.0703" y2="8568.7988" id="svg5.face_highlight_1_"> + <stop offset="0" style="stop-color:#FFFFFF;stop-opacity:0.24"/> + <stop offset="1" style="stop-color:#FFFFFF;stop-opacity:0.16"/> + </linearGradient> + <path fill="url(#svg5.face_highlight_1_)" id="svg5.face_highlight_19_" d="M13.979,3.166c-3.018,0.023-5.374,2.247-6.394,5.193 c-0.332,0.96-0.147,2.021,0.49,2.814c1.365,1.699,2.865,3.142,4.73,4.044c1.569,0.759,3.767,1.192,5.946,0.624 c1.139-0.297,1.994-1.229,2.188-2.383c0.092-0.548,0.146-1.145,0.143-1.777C21.041,6.977,18.137,3.134,13.979,3.166z"/> + + <path stroke-linecap="round" fill="#4B4B4B" stroke-linejoin="round" d=" 
M14.187,0.25c-5.418,0-10.052,3.624-9.61,12.813c0.282,3.281,2.931,6.021,4.683,6.766C7.795,14.344,9.107,7.317,8.967,7.064 c-0.14-0.252,1.769,3.364,1.769,3.364l1.591-4.155c2.771,2.14,0.197,5.654,0.524,5.528c2.129-0.815,2.67-4.614,2.67-4.614 s1.364,1.829,1.35,2.752c-0.025,1.781,1.098-3.033,1.098-3.033l0.514,1.016c3.363,4.911,1.842,8.104,0.826,11.391 c7.834-0.352,6.146-5.24,4.83-9.203C21.795,3.046,19.604,0.25,14.187,0.25z" id="svg5.Hair_Female_1_Black_9_" stroke="#000000" stroke-width="0.5"/> + + <linearGradient gradientTransform="matrix(0.9852 0 0 0.9852 -22.6981 -2330.2188)" x1="53.4854" x2="21.2897" gradientUnits="userSpaceOnUse" y1="2393.7295" y2="2393.7295" id="svg5.body_1_"> + <stop offset="0" style="stop-color:#49AD33"/> + <stop offset="1" style="stop-color:#C2DA92"/> + </linearGradient> + <path fill="url(#svg5.body_1_)" id="svg5.body_63_" d="M0.25,31.384c0,0.97,0.788,1.747,1.762,1.747 h24.255c0.974,0,1.761-0.779,1.761-1.747c0,0-0.922-3.431-3.262-5.771c-2.408-2.406-4.123-2.572-4.123-2.572 c-0.723,3.491-4.277,4.393-6.503,4.393c-2.227,0-5.75-0.994-6.06-4.393c0,0-1.877-0.207-4.594,2.51 C0.689,28.345,0.25,31.384,0.25,31.384z" stroke-width="0.5" stroke="#008D33"/> + + <linearGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" x1="43.8525" x2="44.5819" gradientUnits="userSpaceOnUse" y1="-1502.7417" y2="-1520.2552" id="svg5.neck_x5F_white_3_"> + <stop offset="0.2711" style="stop-color:#FFAB4F"/> + <stop offset="1" style="stop-color:#FFD28F"/> + </linearGradient> + <path fill="url(#svg5.neck_x5F_white_3_)" id="svg5.neck_x5F_white_17_" stroke-width="0.5" d=" M39.859,18.899h9.094v10.115c-0.656,0.186-8.586,0.218-9.094,0.086V18.899z" stroke-miterlimit="10" stroke="#FFAB4F"/> + + <linearGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" x1="43.3506" x2="43.9712" gradientUnits="userSpaceOnUse" y1="-1499.2075" y2="-1514.1077" id="svg5.SVGID_2_"> + <stop offset="0.2711" style="stop-color:#FFAB4F"/> + <stop offset="1" style="stop-color:#FFD28F"/> + 
</linearGradient> + <path fill="url(#svg5.SVGID_2_)" stroke-width="0.5" d="M54.986,25.612 c-2.199-2.197-5.478-2.941-6.033-3.055v-3.658h-9.094c0,0,0.02,3.452,0,3.435c0,0-3.438,0.499-6.153,3.216 c-2.796,2.796-3.235,5.835-3.235,5.835c0,0.971,0.787,1.746,1.762,1.746h24.256c0.974,0,1.761-0.777,1.761-1.746 C58.248,31.384,57.326,27.953,54.986,25.612z" stroke-miterlimit="10" stroke="#ED9135"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" gradientUnits="userSpaceOnUse" r="11.7111" cx="43.9727" id="svg5.face_x5F_white_3_" cy="-1500.0908" fx="41.6881" fy="-1501.3356"> + <stop offset="0" style="stop-color:#FFD28F"/> + <stop offset="1" style="stop-color:#FFAB4F"/> + </radialGradient> + <path fill="url(#svg5.face_x5F_white_3_)" id="svg5.face_x5F_white_17_" stroke-width="0.5" d=" M52.059,11.679c0.043,5.1-3.369,9.26-7.623,9.293c-4.25,0.034-7.731-4.073-7.774-9.172c-0.042-5.1,3.368-9.261,7.622-9.294 C48.536,2.473,52.016,6.579,52.059,11.679z" stroke-miterlimit="10" stroke="#ED9135"/> + + <linearGradient gradientTransform="matrix(0.275 0 0 0.2733 -1169.7629 -2324.9595)" x1="4409.9326" x2="4421.7476" gradientUnits="userSpaceOnUse" y1="8522.0703" y2="8568.7969" id="svg5.face_highlight_3_"> + <stop offset="0" style="stop-color:#FFFFFF;stop-opacity:0.24"/> + <stop offset="1" style="stop-color:#FFFFFF;stop-opacity:0.16"/> + </linearGradient> + <path fill="url(#svg5.face_highlight_3_)" id="svg5.face_highlight_17_" d="M44.2,3.166c-3.019,0.023-5.374,2.247-6.394,5.193 c-0.332,0.96-0.147,2.021,0.489,2.814c1.364,1.699,2.864,3.142,4.729,4.044c1.568,0.759,3.768,1.192,5.945,0.624 c1.139-0.297,1.994-1.229,2.188-2.383c0.092-0.548,0.146-1.145,0.143-1.777C51.262,6.977,48.357,3.134,44.2,3.166z"/> + + <path stroke-linecap="round" fill="#ECECEC" stroke-linejoin="round" d=" M40.359,6.625c0,0,2.66,3.625,7.5,1.875c1.365-0.281,4.529,0.518,4.529,0.518s-1.828-8.085-8.523-7.646 c-8.943,0.69-7.615,11.467-7.615,11.467s1.384-0.342,2.518-2.401C39.255,9.553,40.359,6.625,40.359,6.625z" 
id="svg5.hair_x5F_gray_17_" stroke="#9B9B9B" stroke-width="0.5"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" id="svg5.collar_x5F_body_2_" r="16.2003" cx="37.6602" gradientUnits="userSpaceOnUse" cy="-1515.813"> + <stop offset="0" style="stop-color:#B0E8FF"/> + <stop offset="1" style="stop-color:#74AEEE"/> + </radialGradient> + <path fill="url(#svg5.collar_x5F_body_2_)" id="svg5.collar_x5F_body_8_" d="M30.471,31.384 c0,0.97,0.787,1.747,1.762,1.747h24.256c0.974,0,1.761-0.779,1.761-1.747c0,0-0.922-3.431-3.263-5.771 c-2.407-2.406-5.623-3.072-5.623-3.072c-0.885,0.827-2.805,1.4-5.03,1.4s-4.146-0.573-5.031-1.4c0,0-2.878,0.293-5.595,3.01 C30.91,28.345,30.471,31.384,30.471,31.384z" stroke-width="0.5" stroke="#5491CF"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" id="svg5.collar_x5F_r_2_" r="4.6401" cx="45.8311" gradientUnits="userSpaceOnUse" cy="-1510.8745"> + <stop offset="0" style="stop-color:#80CCFF"/> + <stop offset="1" style="stop-color:#74AEEE"/> + </radialGradient> + <path fill="url(#svg5.collar_x5F_r_2_)" id="svg5.collar_x5F_r_8_" d="M49.301,20.69 c0,0-0.287,1.185-1.506,2.221c-1.055,0.897-2.893,1.036-2.893,1.036l1.986,3.108c0,0,1.479-0.818,2.504-1.924 c0.961-1.036,0.686-2.74,0.686-2.74L49.301,20.69z" stroke-width="0.5" stroke="#5491CF"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" id="svg5.collar_x5F_l_2_" r="4.6426" cx="39.6494" gradientUnits="userSpaceOnUse" cy="-1510.896"> + <stop offset="0" style="stop-color:#80CCFF"/> + <stop offset="1" style="stop-color:#74AEEE"/> + </radialGradient> + <path fill="url(#svg5.collar_x5F_l_2_)" id="svg5.collar_x5F_l_8_" d="M39.536,20.711 c0,0,0.288,1.185,1.506,2.221c1.056,0.896,2.894,1.036,2.894,1.036l-1.987,3.108c0,0-1.479-0.818-2.505-1.925 c-0.961-1.036-0.685-2.74-0.685-2.74L39.536,20.711z" stroke-width="0.5" stroke="#5491CF"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.1201 -714.5371)" id="svg5.Knob2_2_" r="0.4846" 
cx="44.0645" gradientUnits="userSpaceOnUse" cy="-742.6421"> + <stop offset="0" style="stop-color:#80CCFF"/> + <stop offset="1" style="stop-color:#74AEEE"/> + </radialGradient> + <circle fill="url(#svg5.Knob2_2_)" r="0.292" cx="44.35" id="svg5.Knob2_8_" cy="28.127" stroke="#5491CF" stroke-width="0.5"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.1201 -714.5371)" id="svg5.Knob1_2_" r="0.4847" cx="44.083" gradientUnits="userSpaceOnUse" cy="-745.7642"> + <stop offset="0" style="stop-color:#80CCFF"/> + <stop offset="1" style="stop-color:#74AEEE"/> + </radialGradient> + <circle fill="url(#svg5.Knob1_2_)" r="0.292" cx="44.369" id="svg5.Knob1_8_" cy="31.25" stroke="#5491CF" stroke-width="0.5"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" id="svg5.jacket_x5F_body_1_" r="20.459" cx="36.7842" gradientUnits="userSpaceOnUse" cy="-1515.3755"> + <stop offset="0" style="stop-color:#D0D0D0"/> + <stop offset="1" style="stop-color:#9B9B9B"/> + </radialGradient> + + <path stroke-linecap="round" fill="url(#svg5.jacket_x5F_body_1_)" stroke-linejoin="round" d=" M54.986,25.612c-2.217-2.217-5.111-2.955-5.557-3.059l-2.695,10.572h-4.43L39.686,22.84c-0.145-0.096-0.271-0.196-0.385-0.301 c0,0-2.877,0.293-5.595,3.01c-2.796,2.796-3.233,5.835-3.233,5.835c0,0.97,0.786,1.747,1.76,1.747h24.256 c0.975,0,1.761-0.779,1.761-1.747C58.248,31.384,57.326,27.953,54.986,25.612z" id="svg5.jacket_x5F_body_8_" stroke="#4B4B4B" stroke-width="0.5"/> + + <polygon stroke-linecap="round" points=" 49.688,21.038 51.305,23.187 50.615,27.835 48.914,28.25 49.547,29.563 47.509,33.126 46.734,33.126 " fill="#9B9B9B" stroke-linejoin="round" id="svg5.jacket_x5F_r_8_" stroke="#4B4B4B" stroke-width="0.5"/> + + <polygon stroke-linecap="round" points=" 39.354,21.038 37.734,23.187 38.426,27.835 40.127,28.25 39.493,29.563 41.531,33.126 42.307,33.126 " fill="#B2B2B2" stroke-linejoin="round" id="svg5.jacket_x5F_l_8_" stroke="#4B4B4B" stroke-width="0.5"/> + <path fill="#D54A30" 
id="svg5.path5135_8_" d="M43.936,27.61c0,0-0.926,1.028-1.041,3.271 c-0.115,2.244,0,2.244,0,2.244h3.273c0,0,0.115,0.031-0.077-2.182c-0.2-2.302-1.194-3.334-1.194-3.334L43.936,27.61L43.936,27.61z" stroke-width="0.5" stroke="#B51A19"/> + <path fill="#D54A30" id="svg5.path5131_8_" d="M44.377,24.34h0.063l1.275,2.001 c0.258,0.477-0.604,0.898-0.729,1.274l-1.139-0.008c-0.121-0.38-1.129-0.683-0.738-1.292L44.377,24.34z" stroke-width="0.5" stroke="#B51A19"/> + + <linearGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" x1="43.6328" x2="44.3621" gradientUnits="userSpaceOnUse" y1="-1527.7417" y2="-1545.2545" id="svg5.neck_x5F_white_4_"> + <stop offset="0.2711" style="stop-color:#FFAB4F"/> + <stop offset="1" style="stop-color:#FFD28F"/> + </linearGradient> + <path fill="url(#svg5.neck_x5F_white_4_)" id="svg5.neck_x5F_white_11_" stroke-width="0.5" d=" M39.639,43.898h9.094v10.115c-0.655,0.187-8.586,0.219-9.094,0.086V43.898z" stroke-miterlimit="10" stroke="#FFAB4F"/> + + <linearGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" x1="43.1309" x2="43.7515" gradientUnits="userSpaceOnUse" y1="-1524.2065" y2="-1539.1072" id="svg5.SVGID_3_"> + <stop offset="0.2711" style="stop-color:#FFAB4F"/> + <stop offset="1" style="stop-color:#FFD28F"/> + </linearGradient> + <path fill="url(#svg5.SVGID_3_)" stroke-width="0.5" d="M54.766,50.611 c-2.199-2.196-5.477-2.94-6.033-3.055v-3.658h-9.094c0,0,0.021,3.453,0,3.436c0,0-3.437,0.499-6.152,3.216 c-2.797,2.796-3.235,5.835-3.235,5.835c0,0.971,0.787,1.746,1.763,1.746h24.254c0.975,0,1.762-0.777,1.762-1.746 C58.027,56.384,57.105,52.953,54.766,50.611z" stroke-miterlimit="10" stroke="#ED9135"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" gradientUnits="userSpaceOnUse" r="11.7114" cx="43.752" id="svg5.face_x5F_white_4_" cy="-1525.0908" fx="41.4673" fy="-1526.3356"> + <stop offset="0" style="stop-color:#FFD28F"/> + <stop offset="1" style="stop-color:#FFAB4F"/> + </radialGradient> + <path 
fill="url(#svg5.face_x5F_white_4_)" id="svg5.face_x5F_white_11_" stroke-width="0.5" d=" M51.838,36.68c0.043,5.1-3.369,9.26-7.623,9.293c-4.251,0.033-7.732-4.074-7.775-9.173c-0.041-5.1,3.369-9.261,7.623-9.294 C48.314,27.473,51.795,31.579,51.838,36.68z" stroke-miterlimit="10" stroke="#ED9135"/> + + <linearGradient gradientTransform="matrix(0.275 0 0 0.2733 -1169.7629 -2324.9595)" x1="4409.1299" x2="4420.9448" gradientUnits="userSpaceOnUse" y1="8613.5469" y2="8660.2725" id="svg5.face_highlight_4_"> + <stop offset="0" style="stop-color:#FFFFFF;stop-opacity:0.24"/> + <stop offset="1" style="stop-color:#FFFFFF;stop-opacity:0.16"/> + </linearGradient> + <path fill="url(#svg5.face_highlight_4_)" id="svg5.face_highlight_11_" d="M43.979,28.166c-3.018,0.023-5.373,2.247-6.394,5.193 c-0.332,0.959-0.147,2.021,0.49,2.813c1.364,1.699,2.864,3.142,4.729,4.044c1.568,0.76,3.768,1.192,5.945,0.624 c1.139-0.297,1.994-1.229,2.188-2.383c0.092-0.548,0.146-1.146,0.144-1.776C51.041,31.977,48.137,28.134,43.979,28.166z"/> + <path fill="#CC9869" d="M46.107,29.969 c0,0,2.845,1.375,3.845,4.062c1.052,2.826,2.062,4.117,2.095,4c0.938-3.395,0.531-10.718-5.086-10.792 c-10.229-3.832-12.79,5.98-11.947,9.824c0.539,2.457,1.117,3.344,1.971,4.041C36.982,41.104,37.514,33.781,46.107,29.969z" stroke-width="0.5" stroke="#99724F" stroke-linecap="round" stroke-linejoin="round"/> + + <linearGradient gradientTransform="matrix(0.9852 0 0 0.9852 -22.6981 -2330.2188)" x1="83.9365" x2="51.7404" gradientUnits="userSpaceOnUse" y1="2419.1064" y2="2419.1064" id="svg5.body_2_"> + <stop offset="0" style="stop-color:#49AD33"/> + <stop offset="1" style="stop-color:#C2DA92"/> + </linearGradient> + <path fill="url(#svg5.body_2_)" id="svg5.body_35_" d="M30.25,56.384c0,0.97,0.787,1.747,1.762,1.747 h24.256c0.974,0,1.76-0.779,1.76-1.747c0,0-0.922-3.431-3.262-5.771c-2.408-2.406-4.123-2.572-4.123-2.572 c-0.723,3.49-4.276,4.393-6.504,4.393c-2.227,0-5.75-0.994-6.059-4.393c0,0-1.878-0.207-4.596,2.51 
C30.689,53.346,30.25,56.384,30.25,56.384z" stroke-width="0.5" stroke="#008D33"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" gradientUnits="userSpaceOnUse" r="5.7236" cx="14.0952" id="svg5.neck_x5F_white_5_" cy="-1534.9302" fx="12.9786" fy="-1535.5386"> + <stop offset="0" style="stop-color:#B38E5D"/> + <stop offset="1" style="stop-color:#805126"/> + </radialGradient> + <path fill="url(#svg5.neck_x5F_white_5_)" id="svg5.neck_x5F_white_2_" stroke-width="0.5" d=" M9.859,43.898h9.094v6.615c-0.656,0.187-8.586,0.219-9.094,0.086V43.898z" stroke-miterlimit="10" stroke="#5B453B"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" gradientUnits="userSpaceOnUse" r="15.5272" cx="13.8213" id="svg5.SVGID_4_" cy="-1539.1084" fx="10.7923" fy="-1540.7589"> + <stop offset="0" style="stop-color:#B38E5D"/> + <stop offset="1" style="stop-color:#805126"/> + </radialGradient> + <path fill="url(#svg5.SVGID_4_)" stroke-width="0.5" d="M24.986,50.611 c-2.199-2.196-5.477-2.94-6.033-3.055v-3.658H9.859c0,0,0.02,3.453,0,3.436c0,0-3.437,0.499-6.153,3.216 c-2.796,2.796-3.235,5.835-3.235,5.835c0,0.971,0.787,1.746,1.762,1.746h24.255c0.974,0,1.761-0.777,1.761-1.746 C28.248,56.384,27.326,52.953,24.986,50.611z" stroke-miterlimit="10" stroke="#5B453B"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" gradientUnits="userSpaceOnUse" r="11.7123" cx="13.9722" id="svg5.face_x5F_white_5_" cy="-1525.0908" fx="11.6873" fy="-1526.3357"> + <stop offset="0" style="stop-color:#B38E5D"/> + <stop offset="1" style="stop-color:#805126"/> + </radialGradient> + <path fill="url(#svg5.face_x5F_white_5_)" id="svg5.face_x5F_white_2_" stroke-width="0.5" d=" M22.059,36.68c0.043,5.1-3.369,9.26-7.623,9.293c-4.251,0.033-7.732-4.074-7.775-9.173c-0.042-5.1,3.369-9.261,7.622-9.294 C18.536,27.473,22.016,31.579,22.059,36.68z" stroke-miterlimit="10" stroke="#5B453B"/> + + <linearGradient gradientTransform="matrix(0.275 0 0 0.2733 -1169.7629 -2324.9595)" 
x1="4300.8428" x2="4312.6582" gradientUnits="userSpaceOnUse" y1="8613.5449" y2="8660.2734" id="svg5.face_highlight_5_"> + <stop offset="0" style="stop-color:#FFFFFF;stop-opacity:0.42"/> + <stop offset="1" style="stop-color:#FFFFFF;stop-opacity:0.12"/> + </linearGradient> + <path fill="url(#svg5.face_highlight_5_)" id="svg5.face_highlight_2_" d="M14.2,28.166c-3.018,0.023-5.374,2.247-6.394,5.193 c-0.332,0.959-0.147,2.021,0.49,2.813c1.364,1.699,2.864,3.142,4.73,4.044c1.568,0.76,3.767,1.192,5.945,0.624 c1.139-0.297,1.994-1.229,2.188-2.383c0.092-0.548,0.146-1.146,0.143-1.776C21.262,31.977,18.357,28.134,14.2,28.166z"/> + + <path stroke-linecap="round" fill="#4B4B4B" stroke-linejoin="round" d=" M10.359,31.625c1.709,2.166,4.667,3.459,4.667,3.459l-0.708-1.75c0,0,3.547,2.346,4.041,2.166c0.484-0.1-0.541-1.902-0.541-1.902 s1.568,0.896,2.428,1.983c0.9,1.14,2.143,1.752,2.143,1.752s1.346-10.974-8.523-10.961C4.359,26.354,6.25,38.839,6.25,38.839 L7.568,33.5c0,0,0.457,2.879,0.699,2.438C9.151,32.605,9.651,30.75,10.359,31.625z" id="svg5.Hair_Young_Black_1_" stroke="#000000" stroke-width="0.5"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" id="svg5.collar_x5F_body_3_" r="16.2003" cx="7.6602" gradientUnits="userSpaceOnUse" cy="-1540.813"> + <stop offset="0" style="stop-color:#B0E8FF"/> + <stop offset="1" style="stop-color:#74AEEE"/> + </radialGradient> + <path fill="url(#svg5.collar_x5F_body_3_)" id="svg5.collar_x5F_body_1_" d="M0.471,56.384 c0,0.97,0.787,1.747,1.762,1.747h24.255c0.974,0,1.761-0.779,1.761-1.747c0,0-0.922-3.431-3.262-5.771 c-2.408-2.406-5.623-3.072-5.623-3.072c-0.885,0.827-2.805,1.4-5.031,1.4s-4.146-0.573-5.031-1.4c0,0-2.878,0.293-5.595,3.01 C0.91,53.346,0.471,56.384,0.471,56.384z" stroke-width="0.5" stroke="#5491CF"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" id="svg5.collar_x5F_r_3_" r="4.6412" cx="15.8306" gradientUnits="userSpaceOnUse" cy="-1535.874"> + <stop offset="0" style="stop-color:#80CCFF"/> + 
<stop offset="1" style="stop-color:#74AEEE"/> + </radialGradient> + <path fill="url(#svg5.collar_x5F_r_3_)" id="svg5.collar_x5F_r_1_" d="M19.301,45.689 c0,0-0.287,1.186-1.506,2.222c-1.055,0.897-2.893,1.036-2.893,1.036l1.986,3.107c0,0,1.479-0.818,2.504-1.924 c0.961-1.035,0.686-2.74,0.686-2.74L19.301,45.689z" stroke-width="0.5" stroke="#5491CF"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.2002 -1487.2285)" id="svg5.collar_x5F_l_3_" r="4.6422" cx="9.6494" gradientUnits="userSpaceOnUse" cy="-1535.896"> + <stop offset="0" style="stop-color:#80CCFF"/> + <stop offset="1" style="stop-color:#74AEEE"/> + </radialGradient> + <path fill="url(#svg5.collar_x5F_l_3_)" id="svg5.collar_x5F_l_1_" d="M9.536,45.711 c0,0,0.288,1.186,1.506,2.221c1.055,0.896,2.893,1.037,2.893,1.037l-1.987,3.107c0,0-1.479-0.818-2.504-1.926 c-0.961-1.035-0.685-2.739-0.685-2.739L9.536,45.711z" stroke-width="0.5" stroke="#5491CF"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.1201 -714.5371)" id="svg5.Knob2_3_" r="0.4842" cx="14.0645" gradientUnits="userSpaceOnUse" cy="-767.6421"> + <stop offset="0" style="stop-color:#80CCFF"/> + <stop offset="1" style="stop-color:#74AEEE"/> + </radialGradient> + <circle fill="url(#svg5.Knob2_3_)" r="0.292" cx="14.35" id="svg5.Knob2_1_" cy="53.127" stroke="#5491CF" stroke-width="0.5"/> + + <radialGradient gradientTransform="matrix(1 0 0 -1 0.1201 -714.5371)" id="svg5.Knob1_3_" r="0.4844" cx="14.0835" gradientUnits="userSpaceOnUse" cy="-770.7642"> + <stop offset="0" style="stop-color:#80CCFF"/> + <stop offset="1" style="stop-color:#74AEEE"/> + </radialGradient> + <circle fill="url(#svg5.Knob1_3_)" r="0.292" cx="14.369" id="svg5.Knob1_1_" cy="56.25" stroke="#5491CF" stroke-width="0.5"/> + <path fill="#D54A30" id="svg5.path5135_1_" d="M13.935,52.609c0,0-0.925,1.029-1.04,3.271 c-0.115,2.244,0,2.244,0,2.244h3.273c0,0,0.115,0.031-0.077-2.182c-0.2-2.303-1.194-3.334-1.194-3.334H13.935L13.935,52.609z" stroke-width="0.5" stroke="#B51A19"/> + <path 
fill="#D54A30" id="svg5.path5131_1_" d="M14.377,49.34h0.062l1.276,2.001 c0.258,0.478-0.604,0.897-0.728,1.274l-1.14-0.008c-0.121-0.381-1.128-0.684-0.738-1.293L14.377,49.34z" stroke-width="0.5" stroke="#B51A19"/> +</g> +</svg> + </g> + </g> + <g text-rendering="geometricPrecision" stroke-miterlimit="1.45" shape-rendering="geometricPrecision" font-family="sans-serif" transform="matrix(1,0,0,1,-351,-156)" stroke-linecap="butt"> + <text x="741.7432" xml:space="preserve" y="793.2531" clip-path="url(#clipPath2)" stroke="none">Core Facility</text> + <path fill="none" d="M589.887 516.902 L849.8068 376.5511" clip-path="url(#clipPath2)"/> + <path d="M582.8477 520.7031 L595.7823 519.4011 L590.7668 516.4269 L591.0309 510.6019 Z" clip-path="url(#clipPath2)" stroke="none"/> + <path d="M856.8461 372.75 L843.9114 374.052 L848.9268 377.0262 L848.6628 382.8512 Z" clip-path="url(#clipPath2)" stroke="none"/> + </g> + <g text-rendering="geometricPrecision" stroke-miterlimit="1.45" shape-rendering="geometricPrecision" font-family="sans-serif" transform="matrix(0.8829,-0.4695,0.4695,0.8829,285.9779,306.7355)" stroke-linecap="butt"> + <text x="2" xml:space="preserve" y="13.1387" clip-path="url(#clipPath8)" stroke="none">Set Bucket Policy</text> + </g> + <g text-rendering="geometricPrecision" stroke-miterlimit="1.45" shape-rendering="geometricPrecision" transform="matrix(1,0,0,1,-351,-156)" stroke-linecap="butt"> + <path fill="none" d="M624.6377 709.7267 L868.7921 379.1849" clip-path="url(#clipPath2)"/> + <path d="M619.8846 716.1617 L631.0361 709.48 L625.2318 708.9224 L622.9924 703.5386 Z" clip-path="url(#clipPath2)" stroke="none"/> + <path d="M873.5453 372.75 L862.3937 379.4316 L868.198 379.9892 L870.4373 385.373 Z" clip-path="url(#clipPath2)" stroke="none"/> + <path fill="none" d="M515.5966 715.4969 L863.408 378.3184" clip-path="url(#clipPath2)"/> + <path d="M509.8526 721.0653 L521.9488 716.3027 L516.3146 714.8009 L514.9883 709.1228 Z" clip-path="url(#clipPath2)" stroke="none"/> + <path 
d="M869.152 372.75 L857.0558 377.5126 L862.69 379.0144 L864.0162 384.6925 Z" clip-path="url(#clipPath2)" stroke="none"/> + <path fill="none" d="M743.3195 710.4368 L575.8471 585.9" clip-path="url(#clipPath2)"/> + <path d="M749.7391 715.2105 L743.0933 704.0377 L742.5171 709.84 L737.1262 712.0621 Z" clip-path="url(#clipPath2)" stroke="none"/> + <path d="M569.4276 581.1262 L576.0733 592.2991 L576.6496 586.4966 L582.0405 584.2746 Z" clip-path="url(#clipPath2)" stroke="none"/> + <path fill="none" d="M789.2825 700.0531 L878.3005 380.4566" clip-path="url(#clipPath2)"/> + <path d="M787.1359 707.7598 L795.1724 697.5414 L789.5508 699.0898 L785.5391 694.8582 Z" clip-path="url(#clipPath2)" stroke="none"/> + <path d="M880.447 372.75 L872.4105 382.9684 L878.0322 381.42 L882.0438 385.6516 Z" clip-path="url(#clipPath2)" stroke="none"/> + </g> + </g> +</svg> diff --git a/oidc_dev_example/clients_config.json b/oidc_dev_example/clients_config.json new file mode 100644 index 0000000000000000000000000000000000000000..afc8e5ea257ad11ea1e693fd8860d6833052e7f4 --- /dev/null +++ b/oidc_dev_example/clients_config.json @@ -0,0 +1,21 @@ +[ + { + "ClientId": "", + "ClientSecrets": [ + "" + ], + "RedirectUris": ["http://localhost:8000/api/auth/callback", "http://localhost:9999/api/auth/callback", + "http://127.0.0.1:8000/api/auth/callback", "http://127.0.0.1:9999/api/auth/callback"], + "Description": "Client for authorization code flow", + "AllowedGrantTypes": [ + "authorization_code" + ], + "AlwaysIncludeUserClaimsInIdToken": false, + "AllowedScopes": [ + "openid", + "profile", + "aarc" + ], + "RequirePkce": true + } +] diff --git a/oidc_dev_example/identity_resources.json b/oidc_dev_example/identity_resources.json new file mode 100644 index 0000000000000000000000000000000000000000..e0db9cc12acd1ba78343b347ca5aabdd8c9da5db --- /dev/null +++ b/oidc_dev_example/identity_resources.json @@ -0,0 +1,6 @@ +[ + { + "Name": "aarc", + "ClaimTypes": ["voperson_id"] + } +] diff --git 
a/oidc_dev_example/server_options.json b/oidc_dev_example/server_options.json new file mode 100644 index 0000000000000000000000000000000000000000..49137b4ed943246c6841a6b1f44fbdddcea3c748 --- /dev/null +++ b/oidc_dev_example/server_options.json @@ -0,0 +1,10 @@ +{ + "AccessTokenJwtType": "JWT", + "Discovery": { + "ShowKeySet": true + }, + "Authentication": { + "CookieSameSiteMode": "Lax", + "CheckSessionCookieSameSiteMode": "Lax" + } +} diff --git a/oidc_dev_example/users_config.json b/oidc_dev_example/users_config.json new file mode 100644 index 0000000000000000000000000000000000000000..8fe946d378d50bc989546c13616887e141d7b261 --- /dev/null +++ b/oidc_dev_example/users_config.json @@ -0,0 +1,22 @@ +[ + { + "SubjectId": "1", + "Username": "skywalker", + "Password": "password", + "Name": "Luke Skywalker", + "Claims": [ + { + "Type": "name", + "Value": "Luke Skywalker" + }, + { + "Type": "voperson_id", + "Value": "4f127a515bf8a1056c67db90d751b1692ec33d8d4ba2d3f5611d15a23aa8a387@lifescience-ri.eu" + }, + { + "Type": "eduperson_principal_name", + "Value": "skywalker" + } + ] + } +] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..d9d192261fac585fdca05b73dd32f5e8c23c8d71 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,40 @@ +[tool.isort] +profile = "black" +line_length = 120 +balanced_wrapping = true + +[tool.black] +line-length = 120 + +[tool.mypy] +plugins = ["pydantic.mypy", "sqlalchemy.ext.mypy.plugin"] +ignore_missing_imports = true +disallow_untyped_defs = true + +[tool.coverage.run] +concurrency = [ + "greenlet", + "thread" +] +omit = [ + "app/tests/*", + "app/check_database_connection.py", + "app/check_ceph_connection.py", + "app/check_oidc_connection.py", + "app/db/base*", + "app/core/config.py", + "app/main.py", + "app/api/miscellaneous_endpoints.py" +] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "def __repr__", + "def __eq__", + "if TYPE_CHECKING", + "if __name__ == 
.__main__.:" +] + +[tool.pytest.ini_options] +asyncio_mode = "strict" diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000000000000000000000000000000000000..abaace2b4f54833c12926357630abb9b01bd9bea --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,18 @@ +# test packages +pytest>=7.1.0,<7.2.0 +pytest-asyncio>=0.18.0,<0.19.0 +pytest-cov>=3.0.0,<3.1.0 +coverage[toml]>=6.4.0,<6.5.0 +# Linters +flake8>=4.0.0,<4.1.0 +autoflake>=1.4.0,<1.5.0 +black>=22.3.0,<22.4.0 +isort>=5.10.0,<5.11.0 +mypy>=0.960,<0.970 +# stubs for mypy +boto3-stubs-lite[s3]>=1.24.0,<1.25.0 +sqlalchemy2-stubs +types-requests +# Miscellaneous +pre-commit>=2.19.0,<2.20.0 +python-dotenv diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..479abb18700bd0e22e366a663be00acbd292388f --- /dev/null +++ b/requirements.txt @@ -0,0 +1,19 @@ +# Webserver packages +anyio>=3.5.0,<3.6.0 +fastapi>=0.79.0,<0.80.0 +pydantic>=1.9.0,<2.0.0 +uvicorn>=0.17.0,<0.18.0 +# Database packages +PyMySQL>=1.0.2,<1.1.0 +SQLAlchemy>=1.4.0,<1.5.0 +alembic>=1.7.0,<1.8.0 +aiomysql>=0.1.0,<0.2.0 +# Security packages +authlib +# Ceph and S3 packages +boto3>=1.24.0,<1.25.0 +rgwadmin>=2.3.0,<2.4.0 +# Miscellaneous +tenacity>=8.0.0,<8.1.0 +httpx>=0.23.0,<0.24.0 +itsdangerous diff --git a/scripts/format-imports.sh b/scripts/format-imports.sh new file mode 100755 index 0000000000000000000000000000000000000000..da788e464e6e4f7eaa4d7ba9e8084cbf206dac26 --- /dev/null +++ b/scripts/format-imports.sh @@ -0,0 +1,6 @@ +#!/bin/sh -e +set -x + +# Sort imports one per line, so autoflake can remove unused imports +isort --force-single-line-imports app +sh ./scripts/format.sh diff --git a/scripts/format.sh b/scripts/format.sh new file mode 100755 index 0000000000000000000000000000000000000000..2670b18649ec35a4cb6f9914e36210f61d09fe11 --- /dev/null +++ b/scripts/format.sh @@ -0,0 +1,6 @@ +#!/bin/sh -e +set -x + +autoflake --remove-all-unused-imports 
--recursive --remove-unused-variables --in-place app --exclude=__init__.py +black app +isort app diff --git a/scripts/lint.sh b/scripts/lint.sh new file mode 100755 index 0000000000000000000000000000000000000000..2ae13873d90b0b4ba889c7de7f290b28def2f579 --- /dev/null +++ b/scripts/lint.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -x + +mypy app +black app --check +isort -c app +flake8 app diff --git a/scripts/test.sh b/scripts/test.sh new file mode 100755 index 0000000000000000000000000000000000000000..19f94b5fc57f629f047e11b9ba21ce51bd5068a7 --- /dev/null +++ b/scripts/test.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -e +set -x + +alembic downgrade base +alembic upgrade head + +pytest --cov=app --cov-report=term-missing app/tests "${@}" diff --git a/start_service.sh b/start_service.sh new file mode 100755 index 0000000000000000000000000000000000000000..d91930b65fb6249c633d2c55668fb7963ce07e7d --- /dev/null +++ b/start_service.sh @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +# Check Connection to Ceph RGW +python app/check_ceph_connection.py +# Check Connection to OIDC provider +python app/check_oidc_connection.py +# Let the DB start +python app/check_database_connection.py + +# Run migrations +alembic upgrade head + +# Start webserver +uvicorn app.main:app --host 0.0.0.0 --port 80 diff --git a/tests-start.sh b/tests-start.sh new file mode 100755 index 0000000000000000000000000000000000000000..eed174eb12d42c2f6d1d5352a362b3c059a2b943 --- /dev/null +++ b/tests-start.sh @@ -0,0 +1,6 @@ +#! 
/usr/bin/env bash +set -e + +python app/check_database_connection.py + +bash scripts/test.sh "$@" diff --git a/traefik_dev/routes.toml b/traefik_dev/routes.toml new file mode 100644 index 0000000000000000000000000000000000000000..859e3b595b4336c8effbe06885ef40884c591409 --- /dev/null +++ b/traefik_dev/routes.toml @@ -0,0 +1,28 @@ +[http] + [http.middlewares] + [http.middlewares.api-stripprefix.stripPrefix] + prefixes = ["/api"] + + [http.routers] + + [http.routers.api-http] + entryPoints = ["http"] + service = "proxyapi" + rule = "PathPrefix(`/api`)" + middlewares = ["api-stripprefix"] + [http.routers.api-ui] + entryPoints = ["http"] + service = "proxyapi-ui" + rule = "!PathPrefix(`/api`)" + + [http.services] + + [http.services.proxyapi] + [http.services.proxyapi.loadBalancer] + [[http.services.proxyapi.loadBalancer.servers]] + url = "http://127.0.0.1:8000" + [http.services.proxyapi-ui] + [http.services.proxyapi-ui.loadBalancer] + [[http.services.proxyapi-ui.loadBalancer.servers]] + url = "http://127.0.0.1:5173" + #url = "http://127.0.0.1:8080" diff --git a/traefik_dev/traefik.toml b/traefik_dev/traefik.toml new file mode 100644 index 0000000000000000000000000000000000000000..c994ccd6ed8366d4abdc91ad64beceb2c05ca645 --- /dev/null +++ b/traefik_dev/traefik.toml @@ -0,0 +1,7 @@ +[entryPoints] + [entryPoints.http] + address = ":9999" + +[providers] + [providers.file] + filename = "routes.toml"