diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..28f1442 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,11 @@ +.git +.vscode +venv +log/* +__pycache__ +**/__pycache__ + +**/.env +**/db.sqlite3 +clouds.yaml +docker-compose.yaml \ No newline at end of file diff --git a/.drone.yml b/.drone.yml new file mode 100644 index 0000000..cf4ba7f --- /dev/null +++ b/.drone.yml @@ -0,0 +1,120 @@ +--- +kind: pipeline +name: notify-pipeline-start +type: kubernetes + +steps: + - name: slack + image: plugins/slack + settings: + webhook: + from_secret: SLACK_WEBHOOK + link_names: true + template: > + {{#if build.pull }} + *Build started*: {{ repo.owner }}/{{ repo.name }} - + {{else}} + *Build started: {{ repo.owner }}/{{ repo.name }} - Build #{{ build.number }}* (type: `{{ build.event }}`) + {{/if}} + Commit: + Branch: + Author: {{ build.author }} + <{{ build.link }}|Visit build page ↗> + +--- +kind: pipeline +name: build-docker-image +type: kubernetes + +steps: + - name: build-docker-image-branch + image: plugins/docker + settings: + cache_from: + - ${DRONE_REPO,,}:${DRONE_SOURCE_BRANCH/\//-} + username: + from_secret: rug_docker_repo_user + password: + from_secret: rug_docker_repo_password + repo: registry.webhosting.rug.nl/${DRONE_REPO,,} + registry: registry.webhosting.rug.nl + dockerfile: docker/Dockerfile.api + tags: + - ${DRONE_SOURCE_BRANCH/\//-} + - ${DRONE_SOURCE_BRANCH/\//-}-${DRONE_COMMIT_SHA:0:8} + when: + event: + exclude: + - tag + + - name: nginx-frontend-proxy + image: plugins/docker + settings: + cache_from: + - ${DRONE_REPO,,}:${DRONE_SOURCE_BRANCH/\//-} + username: + from_secret: rug_docker_repo_user + password: + from_secret: rug_docker_repo_password + repo: registry.webhosting.rug.nl/${DRONE_REPO,,}-ngx + registry: registry.webhosting.rug.nl + dockerfile: docker/Dockerfile.nginx + tags: + - ${DRONE_SOURCE_BRANCH/\//-} + - ${DRONE_SOURCE_BRANCH/\//-}-${DRONE_COMMIT_SHA:0:8} + when: + event: + exclude: + - tag + + - name: scheduler + 
image: plugins/docker + settings: + cache_from: + - ${DRONE_REPO,,}:${DRONE_SOURCE_BRANCH/\//-} + username: + from_secret: rug_docker_repo_user + password: + from_secret: rug_docker_repo_password + repo: registry.webhosting.rug.nl/${DRONE_REPO,,}-scheduler + registry: registry.webhosting.rug.nl + dockerfile: docker/Dockerfile.scheduler + tags: + - ${DRONE_SOURCE_BRANCH/\//-} + - ${DRONE_SOURCE_BRANCH/\//-}-${DRONE_COMMIT_SHA:0:8} + when: + event: + exclude: + - tag + +--- +kind: pipeline +name: notify-pipeline-end +type: kubernetes + +steps: + - name: slack + image: plugins/slack + settings: + webhook: + from_secret: SLACK_WEBHOOK + link_names: true + template: > + {{#if build.pull }} + *{{#success build.status}}✔{{ else }}✘{{/success}} {{ uppercasefirst build.status }}*: {{ repo.owner }}/{{ repo.name }} - + {{else}} + *{{#success build.status}}✔{{ else }}✘{{/success}} {{ uppercasefirst build.status }}: {{ repo.owner }}/{{ repo.name }} - Build #{{ build.number }}* (type: `{{ build.event }}`) + {{/if}} + Commit: + Branch: + Author: {{ build.author }} + Duration: {{ since build.created }} + <{{ build.link }}|Visit build page ↗> + +depends_on: + - build-docker-image + +trigger: + status: + - success + - failure \ No newline at end of file diff --git a/.gitignore b/.gitignore index f8b73e7..c7e6c86 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,7 @@ +clouds.yaml +doc/_build/ +doc/output/ + # ---> Python # Byte-compiled / optimized / DLL files __pycache__/ diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..2e83d0e --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,19 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. 
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Django VRE Broker API", + "type": "python", + "request": "launch", + "program": "${workspaceFolder}/VRE/manage.py", + "args": [ + "runserver", + "0.0.0.0:8080" + ], + "django": true + } + ] +} \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..bcd1055 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,11 @@ +{ + "python.pythonPath": "venv/bin/python", + "files.watcherExclude": { + "**/.git/objects/**": true, + "**/.git/subtree-cache/**": true, + "**/node_modules/*/**": true, + "**/venv/*/**": true, + }, + "restructuredtext.languageServer.disabled": true, + "restructuredtext.confPath": "${workspaceFolder}/doc" +} diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..7a71d9e --- /dev/null +++ b/LICENSE @@ -0,0 +1,19 @@ +MIT License Copyright (c) 2020 Joshua Rubingh, Elwin Buisman + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is furnished +to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next +paragraph) shall be included in all copies or substantial portions of the +Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS +OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF +OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md index 89565b1..04e6c78 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,310 @@ -# Broker +# Virtual Research Environment -VRE Backend API and Scheduler \ No newline at end of file +Secure data drop-off & routing software. + +With this software it is possible to safely upload private and sensitive data, similar to WeTransfer or Dropbox. It is possible to upload single or multiple files at once through a web interface or through an API. + +## Installation + +In order to install this Data drop off project, we need the following packages / software. + +- Django +- TUS (The Upload Server) +- NGINX + +## Django +We install Django with standard settings. We could run it in an async way, but then you need some more steps: https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/ So for now, we keep it simple. + +### Install +Clone the code on `/opt/deploy/data_drop-off` +```sh +git clone https://git.web.rug.nl/VRE/data_drop-off.git /opt/deploy/data_drop-off +``` +Then create a virtual environment +```sh +cd /opt/deploy/data_drop-off +python3 -m venv . +source bin/activate +``` +Finally we install the required Python modules +```python +pip install -r requirements +``` +This will install all the needed Python modules we need to run this Django project. 
+ +### External libraries: +#### Production +https://gitlab.com/eeriksp/django-model-choices +https://github.com/georgemarshall/django-cryptography +https://github.com/jacobian/dj-database-url +https://github.com/ierror/django-js-reverse + +https://github.com/henriquebastos/python-decouple +https://github.com/ezhov-evgeny/webdav-client-python-3 +https://github.com/dblueai/giteapy +https://pypi.org/project/PyGithub/ + +#### Development +https://github.com/jazzband/django-debug-toolbar + +### Settings +The settings for Django are set in an `.env` file so that you can easily change the environment from production to testing. There is an `.env.example` file that could be used as a template. + +```ini +# A uniquely secret key +SECRET_KEY=@wb=#(f4uc0l%e!5*eo+aoflnxb(@!l9!=c5w=4b+x$=!8&vy%a + +# Disable debug in production +DEBUG=False + +# Allowed hosts that Django does server. Take care when NGINX is proxying infront of Django +ALLOWED_HOSTS=127.0.0.1,localhost + +# Enter the database url connection: https://github.com/jacobian/dj-database-url +DATABASE_URL=sqlite:////opt/deploy/data_drop-off/db.sqlite3 + +# Email settings + +# Mail host +EMAIL_HOST= + +# Email user name +EMAIL_HOST_USER= + +# Email password +EMAIL_HOST_PASSWORD= + +# Email server port number to use +EMAIL_PORT=25 + +# Does the email server supports TLS? +EMAIL_USE_TLS= +``` + +Next we have to make the database structure. If you are using SQLite3 as a backend, make sure the database file **DOES** exist on disk. + +```sh +touch /opt/deploy/data_drop-off/db.sqlite3 +``` + +Then in the Python virtual environment we run the following commands: +```sh +./manage.py migrate +./manage.py loaddata virtual_machine_initial_data +./manage.py createsuperuser +./manage.py collectstatic +``` + +And finally you should be able to start the Django application +```sh +./manage.py runserver +``` + +### TUS +TUS = [The Upload Server](https://tus.io/). This is a resumable upload server that speaks HTTP. 
This server is a stand-alone server that is running behind the NGINX server. + +It is even possible to run a TUS instance on a different location (Amsterdam). As long as the TUS is reachable by the NGINX frontend server, and the TUS server can post webhooks back to the frontend server. + +#### Setup +It needs the package `encfs`, so install that first: `sudo apt install encfs` + +The setup is quite simple. This works the same way as Django by using an .env file. So start by creating a new settings file based on the example. + +`cp .env.example .env` + +```ini +# TUS Daemon settings +# Change the variable below to your needs. You can also add more variables that are used in the startup.sh script + +WEBHOOK_URL="http://localhost:8000/datadrops/webhook/" +DROPOFF_API_HAWK_KEY="[ENTER_HAWK_KEY]" +DROPOFF_API_HAWK_SECRET="[ENTER_HAWK_SECRET]" +``` + +You need to create an API user in Django that is allowed to communicate between the TUS daemon and Django. This can be done by creating a new user in the Django admin. This will also generate a new token, which is needed. This token can be found at the API -> Tokens page. + +The default webhook url is: /datadrops/webhook/ + +Then you can start the upload server by starting with the 'start.sh' script: `./start.sh` + +This will start the TUS server running on TCP port 1080. + + +#### Data storage +The upload data is stored at a folder that is configured in the TUS startup command. This should be a folder that is writable by the user that is running the TUS instance. Make sure that the upload folder is not directly accessible by the webserver. Otherwise files can be downloaded. + + +#### Hooks +The TUS is capable of handling hooks based on uploaded files. There are two types of hooks. 'Normal' hooks and webhooks. It is not possible to run both hook systems at the same time due to the blocking nature of the pre-create hook. So we use the 'normal' hook system. That means that custom scripts are run. 
Those scripts can then post the data to a webserver in order to get a Webhook functionality with the 'normal' hooks. +At the moment, there is only a HTTP webcall done in the hook system. There is no actual file movement yet. +For now we have used the following hooks: + +- **pre-create**: This hook will run when a new upload starts. This will trigger the Django server to store the upload in the database, and check if the upload is allowed based on an unique upload url and unique upload code. +- **post-finish**: This hook will run when an upload is finished. And will update the Database/Django with the file size and actual filename (unique) on disk. + +An example of a hook as used in this project. The only changes that should be done is: +- **WEBHOOK_URL**: This is the full url to the Django webhook +Do not change the **HTTP_HOOK_NAME** as this will give errors with Django. + +```python +#!/usr/bin/env python + +import sys +import json +import requests + +# Tus webhook name +HTTP_HOOK_NAME='pre-create' +# Django webserver with hook url path +WEBHOOK_URL='http://localhost:8000/webhook/' + +# Read stdin input data from the TUS daemon +data = ''.join(sys.stdin.readlines()) + +# Test if data is valid JSON... just to be sure... +try: + json.loads(data) +except Exception as ex: + print(ex) + # Send exit code higher then 0 to stop the upload process on the Tus server + sys.exit(1) + +# We know for sure that JSON input data is 'valid'. So we post to the webhook for further checking +try: + # Create a webhook POST request with the needed headers and data. The data is the RAW data from the input. 
+ webhook = requests.post(WEBHOOK_URL, headers={'HOOK-NAME':HTTP_HOOK_NAME}, data=data) + # If the POST is ok, and we get a 200 status back, so the upload can continue + if webhook.status_code == requests.codes.ok: + # This will make the Tus server continue the upload + sys.exit(0) + +except requests.exceptions.RequestException as ex: + # Webhook post failed + print(ex) + +# We had some errors, so upload has to be stopped +sys.exit(1) +``` +This hook uses the same data payload as when TUS would use the Webhook system. So using 'Normal' hooks or using Webhooks with DJANGO should both work out of the box. + +### NGINX +Install NGINX with LUA support through the package manager. For Ubuntu this would be +```sh +apt install nginx libnginx-mod-http-lua +``` +Also configure SSL to make the connections secure. This is outside this installation scope. + +#### LUA +There is usage of LUA in NGINX so we can handle some dynamic data on the server side. All LUA code should be placed in the folder `/etc/nginx/lua`. + +#### Setup +After installation of the packages, create a symbolic link in the `/etc/nginx/sites-enabled` so that a new VHost is created. + +Important parts of the VHost configuration: +```nginx +lua_package_path "/etc/nginx/lua/?.lua;;"; + +server { + listen 80 default_server; + listen [::]:80 default_server; + + # SSL configuration + # + # listen 443 ssl default_server; + # listen [::]:443 ssl default_server; + # + # Note: You should disable gzip for SSL traffic. + # See: https://bugs.debian.org/773332 + # + # Read up on ssl_ciphers to ensure a secure configuration. + # See: https://bugs.debian.org/765782 + # + # Self signed certs generated by the ssl-cert package + # Don't use them in a production server! + # + # include snippets/snakeoil.conf; + + root /var/www/html; + + # Add index.php to the list if you are using PHP + index index.html; + + server_name localhost; + + # This location is hit when the Tus upload is starting and providing meta data for the upload. 
+ # The actual upload is done with the /files location below + location ~ /files/([0-9a-f]+\-[0-9a-f]+\-[1-5][0-9a-f]+\-[89ab][0-9a-f]+\-[0-9a-f]+)?/ { + set $project_id $1; # Here we capture the UUIDv4 value to use in the Tus metadata manipulation + set $tusmetadata ''; + + # Here we manipulate the metadata from the TUS upload server. + # Now we are able to store some extra meta data based on the upload url. + access_by_lua_block { + local dropoff_tus = require('dropoff_tus'); + local project_metadata = ngx.req.get_headers()['Upload-Metadata']; + if project_metadata ~= nill then + ngx.var.tusmetadata = dropoff_tus.updateTusMetadata(project_metadata,ngx.var.project_id); + end + } + + # Here we update the Tus server metadata so we can add the project uuid to it for further processing + proxy_set_header Upload-Metadata $tusmetadata; + + # Rewrite the url so that the project UUIDv4 is stripped from the url to the Tus server + rewrite ^.*$ /files/ break; + + # Disable request and response buffering + proxy_request_buffering off; + proxy_buffering off; + + client_max_body_size 0; + + # Forward incoming requests to local tusd instance. + # This can also be a remote server on a different location. 
+ proxy_pass http://localhost:1080; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + + proxy_redirect off; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Host $server_name; + proxy_set_header X-Forwarded-Proto $scheme; + } + + location ~ /files { + # Disable request and response buffering + proxy_request_buffering off; + proxy_buffering off; + + client_max_body_size 0; + + # Forward incoming requests to local tusd instance + proxy_pass http://localhost:1080; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + + proxy_redirect off; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Host $server_name; + proxy_set_header X-Forwarded-Proto $scheme; + } +} +``` + +And there should be a `lua` folder in the `/etc/nginx` folder. + +In order to test if NGINX is configured correctly run `nginx -t` and it should give an OK message: +```sh +nginx: the configuration file /etc/nginx/nginx.conf syntax is ok +nginx: configuration file /etc/nginx/nginx.conf test is successful +``` + +## Security (not yet inplemented) +It is possible to secure the upload files with PGP encryption. This is done automatically in the Web interface. When you want PGP encryption though API upload, the encryption has to be done before the upload is started. This is a manual action done by the uploader. +So automatic encryption is only available through the Web upload. 
\ No newline at end of file diff --git a/VRE/VRE/.env.example b/VRE/VRE/.env.example new file mode 100644 index 0000000..acbe044 --- /dev/null +++ b/VRE/VRE/.env.example @@ -0,0 +1,76 @@ +# A uniquely secret key +# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key +SECRET_KEY=@wb=#(f4uc0l%e!5*eo+aoflnxb(@!l9!=c5w=4b+x$=!8&vy%' + +# Disable debug in production +# https://docs.djangoproject.com/en/dev/ref/settings/#debug +DEBUG=False + +# Allowed hosts that Django does server. Use comma separated list Take care when NGINX is proxying in front of Django +# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts +ALLOWED_HOSTS=127.0.0.1,localhost + +# All internal IPS for Django. Use comma separated list +# https://docs.djangoproject.com/en/dev/ref/settings/#internal-ips +INTERNAL_IPS=127.0.0.1 + +# Enter the database url connection. Enter all parts even the port numbers: https://github.com/jacobian/dj-database-url +# By default a local sqlite3 database is used. +DATABASE_URL=sqlite:///db.sqlite3 + +# The location on disk where the static files will be placed during deployment. Setting is required +# https://docs.djangoproject.com/en/dev/ref/settings/#static-root +STATIC_ROOT= + +# Enter the default timezone for the visitors when it is not known. +# https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TIME_ZONE +TIME_ZONE=Europe/Amsterdam + +# Email settings +# https://docs.djangoproject.com/en/dev/ref/settings/#email-host +# EMAIL_HOST= + +# Email user name +# https://docs.djangoproject.com/en/dev/ref/settings/#email-host-user +# EMAIL_HOST_USER= + +# Email password +# https://docs.djangoproject.com/en/dev/ref/settings/#email-host-password +# EMAIL_HOST_PASSWORD= + +# Email server port number to use. Default is 25 +# https://docs.djangoproject.com/en/dev/ref/settings/#email-port +# EMAIL_PORT= + +# Does the email server supports TLS? 
+# https://docs.djangoproject.com/en/dev/ref/settings/#email-use-tls +# EMAIL_USE_TLS= + +# https://docs.djangoproject.com/en/dev/ref/settings/#default-from-email +DEFAULT_FROM_EMAIL=Do not reply + +# The sender address. This needs to be one of the allowed domains due to SPF checks +# The code will use a reply-to header to make sure that replies go to the researcher and not this address +EMAIL_FROM_ADDRESS=Do not reply + +# The Redis server is used for background tasks. Enter the variables below. Leave password empty if authentication is not enabled. +# The hostname or IP where the Redis server is running. Default is localhost +REDIS_HOST=localhost + +# The Redis port number on which the server is running. Default is 6379 +REDIS_PORT=6379 + +# The Redis password when authentication is enabled +# REDIS_PASSWORD= + +# The amount of connections to be made inside a connection pool. Default is 10 +REDIS_CONNECTIONS=10 + +# Enter the full path to the web-based file uploading without the Study ID part. The Study ID will be added to this url based on the visitor. +DROPOFF_BASE_URL=http://localhost:8000/dropoffs/ + +# Enter the full url to the NGINX service that is in front of the TUSD service. By default that is http://localhost:1090 +DROPOFF_UPLOAD_HOST=http://localhost:1090 + +# Which file extensions are **NOT** allowed to be uploaded. By default the extensions exe,com,bat,lnk,sh are not allowed +DROPOFF_NOT_ALLOWED_EXTENSIONS=exe,com,bat,lnk,sh diff --git a/VRE/VRE/__init__.py b/VRE/VRE/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/VRE/VRE/asgi.py b/VRE/VRE/asgi.py new file mode 100644 index 0000000..29b1868 --- /dev/null +++ b/VRE/VRE/asgi.py @@ -0,0 +1,16 @@ +""" +ASGI config for VRE project. + +It exposes the ASGI callable as a module-level variable named ``application``. 
+ +For more information on this file, see +https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/ +""" + +import os + +from django.core.asgi import get_asgi_application + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'VRE.settings') + +application = get_asgi_application() diff --git a/VRE/VRE/settings.py b/VRE/VRE/settings.py new file mode 100644 index 0000000..42a5b89 --- /dev/null +++ b/VRE/VRE/settings.py @@ -0,0 +1,234 @@ +""" +Django settings for VRE project. + +Generated by 'django-admin startproject' using Django 3.1.5. + +For more information on this file, see +https://docs.djangoproject.com/en/3.1/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/3.1/ref/settings/ +""" + +from redis import ConnectionPool +from pathlib import Path +from decouple import config, Csv +from dj_database_url import parse as db_url +from django.utils.translation import ugettext_lazy as _ + +# Build paths inside the project like this: BASE_DIR / 'subdir'. +BASE_DIR = Path(__file__).resolve().parent.parent + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/ + +# SECURITY WARNING: keep the secret key used in production secret! +SECRET_KEY = config('SECRET_KEY') + +# SECURITY WARNING: don't run with debug turned on in production! 
+DEBUG = config('DEBUG', default=False, cast=bool) + +ALLOWED_HOSTS = config('ALLOWED_HOSTS', default='localhost,127.0.0.1', cast=Csv()) + +# Application definition +# We load the application in steps, based on which are available on disk +INSTALLED_APPS = [ + 'django.contrib.admin', + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.messages', + 'django.contrib.staticfiles', + + 'apps.api', + 'apps.dropoff', + 'apps.invitation', + 'apps.researcher', + 'apps.storage', + 'apps.study', + 'apps.virtual_machine', + + 'djoser', + 'rest_framework', + + 'drf_yasg', + 'hawkrest', + 'huey.contrib.djhuey', +] + +MIDDLEWARE = [ + 'django.middleware.security.SecurityMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'hawkrest.middleware.HawkResponseMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', + 'django.middleware.locale.LocaleMiddleware', +] + +ROOT_URLCONF = 'VRE.urls' + +TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [], + 'APP_DIRS': True, + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + ], + }, + }, +] + +WSGI_APPLICATION = 'VRE.wsgi.application' + + +# Database +# https://docs.djangoproject.com/en/3.1/ref/settings/#databases + +DATABASES = { + 'default': config( + 'DATABASE_URL', + default=f'sqlite:///{BASE_DIR / "db.sqlite3"}', # + os.path.join(BASE_DIR, 'db.sqlite3') + cast=db_url + ) +} + +# Password validation +# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [ + 
{ + 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + }, +] + +# Internationalization +# https://docs.djangoproject.com/en/3.1/topics/i18n/ + +LANGUAGE_CODE = 'en-us' + +TIME_ZONE = config('TIME_ZONE', default='UTC') + +USE_I18N = True + +USE_L10N = True + +USE_TZ = True + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/3.1/howto/static-files/ + +STATIC_URL = '/static/' + +STATICFILES_DIRS = [ + BASE_DIR / 'static', +] + +STATIC_ROOT = config('STATIC_ROOT',None) + +INTERNAL_IPS = config('INTERNAL_IPS',default='127.0.0.1',cast=Csv()) + +# SSL Checks / Setup +# This will tell Django if the request is trough SSL (proxy). This is needed for Hawk authentication +SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') + +# settings.py +HUEY = { + 'huey_class': 'huey.RedisHuey', # Huey implementation to use. + 'name': DATABASES['default']['NAME'].split('/')[-1], # Use db name for huey. + 'results': True, # Store return values of tasks. + 'store_none': False, # If a task returns None, do not save to results. + 'immediate': False, # If DEBUG=True, run synchronously. + 'utc': True, # Use UTC for all times internally. + 'blocking': True, # Perform blocking pop rather than poll Redis. + 'connection': { +# 'host': config('REDIS_HOST'), +# 'port': 6379, +# 'db': 0, + 'connection_pool': ConnectionPool( + host=config('REDIS_HOST', 'localhost'), + password=config('REDIS_PASSWORD', None), + port=config('REDIS_PORT', default=6379, cast=int), + max_connections=config('REDIS_CONNECTIONS', default=10, cast=int)), # Definitely you should use pooling! + # ... tons of other options, see redis-py for details. + + # huey-specific connection parameters. 
+ 'read_timeout': 1, # If not polling (blocking pop), use timeout. + 'url': None, # Allow Redis config via a DSN. + }, + 'consumer': { + 'workers': 1, + 'worker_type': 'thread', + 'initial_delay': 0.1, # Smallest polling interval, same as -d. + 'backoff': 1.15, # Exponential backoff using this rate, -b. + 'max_delay': 10.0, # Max possible polling interval, -m. + 'scheduler_interval': 1, # Check schedule every second, -s. + 'periodic': True, # Enable crontab feature. + 'check_worker_health': True, # Enable worker health checks. + 'health_check_interval': 1, # Check worker health every second. + }, +} + +# Email settings for sending out upload invitations. +DEFAULT_FROM_EMAIL = config('DEFAULT_FROM_EMAIL', default='Do not reply') +EMAIL_HOST = config('EMAIL_HOST', default='') +EMAIL_HOST_USER = config('EMAIL_HOST_USER', default='') +EMAIL_HOST_PASSWORD = config('EMAIL_HOST_PASSWORD', default='') +EMAIL_PORT = config('EMAIL_PORT', default=25, cast=int) +EMAIL_USE_TLS = config('EMAIL_USE_TLS', default=False, cast=bool) + +# The sender address. This needs to be one of the allowed domains due to SPF checks +# The code will use a reply-to header to make sure that replies goes to the researcher and not this address +EMAIL_FROM_ADDRESS = config('EMAIL_FROM_ADDRESS', default='Do not reply') + +if DEBUG: + EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend' + EMAIL_FILE_PATH = BASE_DIR / 'sent_emails' + +# Dropoff settings. +# Enter the full path to the Webbased file uploading without the Study ID part. The Study ID will be added to this url based on the visitor. +DROPOFF_BASE_URL = config('DROPOFF_BASE_URL', default='http://localhost:8000/dropoffs/',) +# Enter the full url to the NGINX service that is in front of the TUSD service. By default that is http://localhost:1090 +DROPOFF_UPLOAD_HOST = config('DROPOFF_UPLOAD_HOST', default='http://localhost:1090',) +# Which file extensions are **NOT** allowed to be uploaded. 
By default the extensions exe,com,bat,lnk,sh are not allowed +DROPOFF_NOT_ALLOWED_EXTENSIONS = config('DROPOFF_NOT_ALLOWED_EXTENSIONS',default='exe,com,bat,lnk,sh',cast=Csv()) + + +# LOGGING = { +# 'version': 1, +# 'disable_existing_loggers': False, +# 'handlers': { +# 'file': { +# 'class': 'logging.FileHandler', +# 'filename': f'{BASE_DIR}/../log/debug.log', +# }, +# }, +# 'loggers': { +# 'django': { +# 'handlers': ['file'], +# 'level': 'DEBUG' if DEBUG else 'INFO', +# 'propagate': True, +# }, + +# 'hawkrest': { +# 'handlers': ['file'], +# 'level': 'DEBUG' if DEBUG else 'INFO', +# } +# }, +# } \ No newline at end of file diff --git a/VRE/VRE/urls.py b/VRE/VRE/urls.py new file mode 100644 index 0000000..f02e4e7 --- /dev/null +++ b/VRE/VRE/urls.py @@ -0,0 +1,22 @@ +"""VRE URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/3.1/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" +from django.contrib import admin +from django.urls import path, include, re_path + +urlpatterns = [ + path('api/', include('apps.api.urls')), + path('admin/', admin.site.urls), +] \ No newline at end of file diff --git a/VRE/VRE/wsgi.py b/VRE/VRE/wsgi.py new file mode 100644 index 0000000..170f688 --- /dev/null +++ b/VRE/VRE/wsgi.py @@ -0,0 +1,16 @@ +""" +WSGI config for VRE project. + +It exposes the WSGI callable as a module-level variable named ``application``. 
+ +For more information on this file, see +https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/ +""" + +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'VRE.settings') + +application = get_wsgi_application() diff --git a/VRE/apps/api/__init__.py b/VRE/apps/api/__init__.py new file mode 100644 index 0000000..b536550 --- /dev/null +++ b/VRE/apps/api/__init__.py @@ -0,0 +1 @@ +default_app_config = 'apps.api.apps.ApiConfig' \ No newline at end of file diff --git a/VRE/apps/api/admin.py b/VRE/apps/api/admin.py new file mode 100644 index 0000000..726805c --- /dev/null +++ b/VRE/apps/api/admin.py @@ -0,0 +1,9 @@ +from django.contrib import admin +from .models import Token + +@admin.register(Token) +class TokenAdmin(admin.ModelAdmin): + list_display = ('key', 'user','is_supertoken', 'last_access') + ordering = ('-last_access', 'user', ) + search_fields = ('key', 'user__username',) + readonly_fields = ('created_at', 'updated_at') diff --git a/VRE/apps/api/apps.py b/VRE/apps/api/apps.py new file mode 100644 index 0000000..8c66a5b --- /dev/null +++ b/VRE/apps/api/apps.py @@ -0,0 +1,90 @@ +from django.apps import AppConfig +from django.utils.translation import ugettext_lazy as _ + +from django.conf import settings + +class ApiConfig(AppConfig): + name = 'apps.api' + label = 'api' + verbose_name = _('API') + verbose_name_plural = _('APIs') + + try: + assert settings.SWAGGER_SETTINGS + except AttributeError: + # We only load this setting, if it is not available in the overall settings.py file + settings.SWAGGER_SETTINGS = { + 'SECURITY_DEFINITIONS': { + 'Hawk': { + 'type': 'apiKey', + 'description': 'HTTP Holder-Of-Key Authentication Scheme, https://github.com/hapijs/hawk, https://hawkrest.readthedocs.io/en/latest/
Ex header:
\'Authorization\': \'Hawk mac="F4+S9cu7yZiZEgdtqzMpOOdudvqcV2V2Yzk2WcphECc=", hash="+7fKUX+djeQolvnLTxr0X47e//UHKbkRlajwMw3tx3w=", id="7FI5JET4", ts="1592905433", nonce="DlV-fL"\'', + 'name': 'Authorization', + 'in': 'header' + } + } + } + + try: + assert settings.REST_FRAMEWORK + except AttributeError: + # We only load this setting, if it is not available in the overall settings.py file + # To protect all API views with Hawk by default, put this in your settings: + # https://hawkrest.readthedocs.io/en/latest/usage.html#protecting-api-views-with-hawk + settings.REST_FRAMEWORK = { + + 'DEFAULT_AUTHENTICATION_CLASSES': ( + 'apps.api.authentication.APIHawk', + ), + + 'DEFAULT_PERMISSION_CLASSES': ( + 'rest_framework.permissions.IsAuthenticated', + ), + + # 'DEFAULT_AUTHENTICATION_CLASSES': ( + # 'rest_framework.authentication.TokenAuthentication', + # ), + + # 'DEFAULT_PERMISSION_CLASSES': ( + # 'rest_framework.permissions.IsAuthenticated', ), + + # Use Django's standard `django.contrib.auth` permissions, + # or allow read-only access for unauthenticated users. 
+ #'DEFAULT_PERMISSION_CLASSES': [ + # 'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly' + #], + 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination', + 'PAGE_SIZE': 10 + } + + # try: + # assert settings.HAWK_CREDENTIALS_LOOKUP + # except AttributeError: + # settings.HAWK_CREDENTIALS_LOOKUP = 'apps.api.authentication.hawk_credentials_lookup' + + # try: + # assert settings.HAWK_USER_LOOKUP + # except AttributeError: + # settings.HAWK_USER_LOOKUP = 'apps.api.authentication.hawk_user_lookup' + + try: + assert settings.HAWK_MESSAGE_EXPIRATION + except AttributeError: + # We only load this setting, if it is not available in the overall settings.py file + settings.HAWK_MESSAGE_EXPIRATION = 60 + + + try: + assert settings.DJOSER + except AttributeError: + settings.DJOSER = { +# 'PASSWORD_RESET_CONFIRM_URL': '#/password/reset/confirm/{uid}/{token}', +# 'USERNAME_RESET_CONFIRM_URL': '#/username/reset/confirm/{uid}/{token}', + 'ACTIVATION_URL': '#/activate/{uid}/{token}', + 'SEND_ACTIVATION_EMAIL': False, + 'SEND_CONFIRMATION_EMAIL' : True, + 'HIDE_USERS': True, + 'SERIALIZERS': {}, + } + + def ready(self): + from . import signals \ No newline at end of file diff --git a/VRE/apps/api/authentication.py b/VRE/apps/api/authentication.py new file mode 100644 index 0000000..df0939c --- /dev/null +++ b/VRE/apps/api/authentication.py @@ -0,0 +1,73 @@ +# import the logging library +import logging +# Get an instance of a logger +logger = logging.getLogger(__name__) + +import django.utils +from rest_framework import exceptions +from hawkrest import HawkAuthentication + +from .models import Token +class APIHawk(HawkAuthentication): + """This is the API authentication that is using the HAWK authentication mechanism. + + This class will implement a custom credentials and user lookups so that we can dynamically add new users and update tokens. 
+ """ + + def hawk_credentials_lookup(self, id): + """This method will perform the check if the used token is an existing/known token in the database. This will not lookup a user. Only an existing token. + + Args: + id (string): The token key to lookup in the database for existing token. + + Raises: + exceptions.AuthenticationFailed: If the given token does not exists. + + Returns: + dict: The dictionary holds the token id, the token secret and the used hashing algoritem that is used. + """ + try: + token = Token.objects.get(key=id) + except Token.DoesNotExist: + logger.warning(f'Requested to validate with invalid/non existing token: {id}') + raise exceptions.AuthenticationFailed(f'No such token: {id}') + + return { + 'id' : id, + 'key' : token.secret, + 'algorithm' : 'sha256' + } + + def hawk_user_lookup(self, request, credentials): + """Return the user account that is connected to the used token. + + Args: + request ([type]): The incoming HTTP/API request + credentials (dict): The credentials from ~hawk_credentials_lookup + + Raises: + exceptions.AuthenticationFailed: If the given token does not exists to an existing user + + Returns: + tuple: Returns a tuple holding the user as first item + """ + user = None + try: + user = Token.objects.get(key=credentials['id']).user + except Token.DoesNotExist: + logger.warning(f'Requested to validate non existing user: {id}') + raise exceptions.AuthenticationFailed(f'No user for token: {id}') + + # Update the date time stamp to now for last access data + user.token.last_access = django.utils.timezone.now() + user.token.save() + + return (user,None) + + def __str__(self): + """Authentication identifier. + + Returns: + string: Returns the name of the used authentication mechanism. 
+ """ + return 'Hawk authenticator' \ No newline at end of file diff --git a/VRE/apps/api/locale/en/LC_MESSAGES/django.po b/VRE/apps/api/locale/en/LC_MESSAGES/django.po new file mode 100644 index 0000000..98dea6a --- /dev/null +++ b/VRE/apps/api/locale/en/LC_MESSAGES/django.po @@ -0,0 +1,67 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-02-19 11:19+0100\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: apps/api/apps.py:9 +msgid "API" +msgstr "" + +#: apps/api/apps.py:10 +msgid "APIs" +msgstr "" + +#: apps/api/models.py:43 +msgid "token" +msgstr "" + +#: apps/api/models.py:44 +msgid "tokens" +msgstr "" + +#: apps/api/models.py:46 +msgid "Select the user for this token" +msgstr "" + +#: apps/api/models.py:47 +msgid "Key" +msgstr "" + +#: apps/api/models.py:47 +msgid "The key for this token. This is used for Hawk verification." +msgstr "" + +#: apps/api/models.py:48 +msgid "Secret" +msgstr "" + +#: apps/api/models.py:48 +msgid "The secret for this token. This is used for Hawk signing." +msgstr "" + +#: apps/api/models.py:49 +msgid "Last access" +msgstr "" + +#: apps/api/models.py:49 +msgid "The date and time when this token is last used." +msgstr "" + +#: apps/api/models.py:64 +msgid "Super token" +msgstr "" diff --git a/VRE/apps/api/locale/nl/LC_MESSAGES/django.po b/VRE/apps/api/locale/nl/LC_MESSAGES/django.po new file mode 100644 index 0000000..1a853ab --- /dev/null +++ b/VRE/apps/api/locale/nl/LC_MESSAGES/django.po @@ -0,0 +1,67 @@ +# SOME DESCRIPTIVE TITLE. 
+# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. +# +msgid "" +msgstr "" +"Project-Id-Version: \n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-02-19 11:19+0100\n" +"PO-Revision-Date: 2020-05-27 16:25+0200\n" +"Last-Translator: Joshua Rubingh \n" +"Language-Team: \n" +"Language: nl\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" +"X-Generator: Poedit 2.0.6\n" + +#: apps/api/apps.py:9 +msgid "API" +msgstr "API" + +#: apps/api/apps.py:10 +msgid "APIs" +msgstr "APIs" + +#: apps/api/models.py:43 +msgid "token" +msgstr "" + +#: apps/api/models.py:44 +msgid "tokens" +msgstr "" + +#: apps/api/models.py:46 +msgid "Select the user for this token" +msgstr "" + +#: apps/api/models.py:47 +msgid "Key" +msgstr "" + +#: apps/api/models.py:47 +msgid "The key for this token. This is used for Hawk verification." +msgstr "" + +#: apps/api/models.py:48 +msgid "Secret" +msgstr "" + +#: apps/api/models.py:48 +msgid "The secret for this token. This is used for Hawk signing." +msgstr "" + +#: apps/api/models.py:49 +msgid "Last access" +msgstr "" + +#: apps/api/models.py:49 +msgid "The date and time when this token is last used." 
+msgstr "" + +#: apps/api/models.py:64 +msgid "Super token" +msgstr "" diff --git a/VRE/apps/api/management/commands/dockersetup.py b/VRE/apps/api/management/commands/dockersetup.py new file mode 100644 index 0000000..d405e82 --- /dev/null +++ b/VRE/apps/api/management/commands/dockersetup.py @@ -0,0 +1,40 @@ +from django.core.management.base import BaseCommand, CommandError +from django.contrib.auth.models import User + +from django.db.utils import IntegrityError + + +#from polls.models import Question as Poll + +class Command(BaseCommand): + help = 'Setting up admin and tusd users for VRE' + + def add_arguments(self, parser): + parser.add_argument('username', help='Username') + parser.add_argument('password', help='Password') + parser.add_argument('email', help='Email address') + + parser.add_argument('--key', help='Token key') + parser.add_argument('--secret', help='Token secret') + + def handle(self, *args, **options): + try: + user = User.objects.create_superuser(username=options['username'], password=options['password'], email=options['email']) + self.stdout.write(self.style.SUCCESS('Successfully created user "%s"' % options['username'])) + + if options['key'] is not None and options['secret'] is not None: + user.token.key=options['key'] + user.token.secret=options['secret'] + user.token.save() + + self.stdout.write(self.style.SUCCESS('Successfully created token for user "%s"' % options['username'])) + + else: + # We do not want an token for the admin + user.token.delete() + + except IntegrityError as ex: + if 'unique constraint' in str(ex).lower(): + self.stdout.write(self.style.WARNING('User "%s" already exists' % options['username'])) + else: + raise CommandError('Could not create user "%s": %s' % (options['username'],ex)) \ No newline at end of file diff --git a/VRE/apps/api/migrations/0001_initial.py b/VRE/apps/api/migrations/0001_initial.py new file mode 100644 index 0000000..2376bf6 --- /dev/null +++ b/VRE/apps/api/migrations/0001_initial.py @@ -0,0 
+1,35 @@ +# Generated by Django 3.1.7 on 2021-02-23 14:37 + +import apps.api.models +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import django_cryptography.fields + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name='Token', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='The date and time this model has been created', verbose_name='Date created')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='The date and time this model has been updated', verbose_name='Date updated')), + ('key', models.CharField(default=apps.api.models.get_random_key, help_text='The key for this token. This is used for Hawk verification.', max_length=16, unique=True, verbose_name='Key')), + ('secret', django_cryptography.fields.encrypt(models.CharField(default=apps.api.models.get_random_secret, help_text='The secret for this token. 
This is used for Hawk signing.', max_length=64, verbose_name='Secret'))), + ('last_access', models.DateTimeField(auto_now_add=True, help_text='The date and time when this token is last used.', verbose_name='Last access')), + ('user', models.OneToOneField(help_text='Select the user for this token', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ], + options={ + 'verbose_name': 'token', + 'verbose_name_plural': 'tokens', + }, + ), + ] diff --git a/VRE/apps/api/migrations/__init__.py b/VRE/apps/api/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/VRE/apps/api/models.py b/VRE/apps/api/models.py new file mode 100644 index 0000000..ffb12e5 --- /dev/null +++ b/VRE/apps/api/models.py @@ -0,0 +1,70 @@ +from django.contrib.auth.models import User +from django.db import models +from django.utils.translation import gettext_lazy as _ + +from django_cryptography.fields import encrypt + +from lib.utils.general import get_random_string +from lib.models.base import MetaDataModel + + +def get_random_key(): + return get_random_string(8) + +def get_random_secret(): + return get_random_string(32) + +class TokenManager(models.Manager): + """ + Custom queryset which will prefetch related user table data when requesting a token from the database as the user is mostly needed every time the token is requested. + """ + + def get_queryset(self): + return super(TokenManager, self).get_queryset().select_related('user') + +class Token(MetaDataModel): + """Token model that holds all the tokens that are used for the API authentication. + + A new token is generated every time when a new user is created. So there is no need for manual token creating. 
This is done through a signal :attr:`~apps.api.signals.create_user_token` + + Attributes + ---------- + user : :class:`~django.contrib.auth.models.User` + The user to which this token belongs too + key : str + The key value that is used for token lookups + secret : str + The secret that is used for encrypting/signing the API messages + last_access : datetime + The date and time when the token is last used (logged in) + """ + + class Meta: + verbose_name = _('token') + verbose_name_plural = _('tokens') + + user = models.OneToOneField(User, on_delete=models.CASCADE, help_text=_('Select the user for this token')) + key = models.CharField(_('Key') , unique=True, default=get_random_key, max_length=16, help_text=_('The key for this token. This is used for Hawk verification.')) + secret = encrypt(models.CharField(_('Secret') ,max_length=64, default=get_random_secret, help_text=_('The secret for this token. This is used for Hawk signing.'))) + last_access = models.DateTimeField(_('Last access'),auto_now_add=True, help_text=_('The date and time when this token is last used.')) + + # Custom manager that will retrieve the related user table as well. + objects = TokenManager() + + def is_supertoken(self): + """Boolean check if the token is belonging to a user with super user rights. Then this token is a super token. + + Returns: + bool: Returns true when the token belongs to a super user. + """ + # TODO: Is it allowed to be a super user and researcher? Could give conflict of interests. With the API token you can read other researchers data... + return self.user.is_superuser == True + + is_supertoken.boolean = True + is_supertoken.short_description = _('Super token') + + def __str__(self): + """ + Print the full name of the researcher based on the first and last name fields of the User model. 
+ """ + return f'{self.key} ({self.user.get_full_name()})' \ No newline at end of file diff --git a/VRE/apps/api/serializers.py b/VRE/apps/api/serializers.py new file mode 100644 index 0000000..455e6e9 --- /dev/null +++ b/VRE/apps/api/serializers.py @@ -0,0 +1,13 @@ +from django.utils.translation import gettext_lazy as _ +from rest_framework import serializers +from .models import Token + +class TokenSerializer(serializers.ModelSerializer): + + class Meta: + model = Token + fields = ['key','secret'] + +class TokenLoginSerializer(serializers.Serializer): + username = serializers.CharField(max_length=200, help_text=_('Your username to login')) + password = serializers.CharField(max_length=200, help_text=_('Your password to login')) diff --git a/VRE/apps/api/signals.py b/VRE/apps/api/signals.py new file mode 100644 index 0000000..e4e4c8a --- /dev/null +++ b/VRE/apps/api/signals.py @@ -0,0 +1,24 @@ +from django.conf import settings +from django.db.models.signals import post_save +from django.dispatch import receiver + +from .models import Token + +@receiver(post_save, sender=settings.AUTH_USER_MODEL) +def create_user_token(sender, instance=None, created=False, **kwargs): + """ + When a new user is created, this signal will also create a new API token for this user. So every user will have an API token. + + Arguments + ---------- + sender : sender + The model that has triggered the signal + + instance: :attr:`~django.contrib.auth.models.User` + The newly created user model data + + created : boolean + Wether the object was created (True) or updated (False). 
+ """ + if created: + Token.objects.create(user=instance) diff --git a/VRE/apps/api/tests.py b/VRE/apps/api/tests.py new file mode 100644 index 0000000..136a604 --- /dev/null +++ b/VRE/apps/api/tests.py @@ -0,0 +1,57 @@ +from django.test import TestCase, override_settings +from rest_framework.test import RequestsClient +from requests_hawk import HawkAuth + +from django.contrib.auth.models import User + +from django.urls import reverse + +import json + +from lib.api.client import VRE_API_Client + +# Create your tests here. +class UserLoginTest(TestCase): + + @classmethod + def setUpTestData(cls): + cls.email = 'dummy@rug.nl' + cls.username = 'dummy@rug.nl' + cls.password = 'doemaarwat' + + User.objects.create_user(username=cls.username,password=cls.password,email=cls.email) + + def setUp(self): + # We want to use the API REST Framework request client, as this enables us to use HAWK authentication during testing + self.client = RequestsClient() + + def test_missing_credentials(self): + endpoint = 'http://testserver/api/auth/users/me/' + response = self.client.get(endpoint) + self.assertEqual(response.status_code, 401) + + + def test_valid_login(self): + login_data = {'username' : self.username, 'password' : self.password} + endpoint = 'http://testserver' + reverse('api:api-login') + response = self.client.post(endpoint, json=login_data) + + self.assertEqual(response.status_code, 200) + self.assertIn('key', response.json()) + self.assertIn('secret', response.json()) + + self.key = response.json()['key'] + self.secret = response.json()['secret'] + + # Add HAWK Authentication to make sure key and secret are correct. 
+ self.client.auth = HawkAuth(id=self.key , key=self.secret, always_hash_content=False) + endpoint = 'http://testserver/api/auth/users/me/' + response = self.client.get(endpoint) + + self.assertEqual(response.status_code, 200) + self.assertIn('email', response.json()) + self.assertIn('id', response.json()) + self.assertIn('username', response.json()) + + self.assertEqual(response.json()['email'], self.email) + self.assertEqual(response.json()['username'], self.username) diff --git a/VRE/apps/api/urls.py b/VRE/apps/api/urls.py new file mode 100644 index 0000000..1d0fa52 --- /dev/null +++ b/VRE/apps/api/urls.py @@ -0,0 +1,77 @@ +from django.urls import path, re_path, include + +from rest_framework import permissions, routers + +from drf_yasg.views import get_schema_view +from drf_yasg import openapi + +from . import views + +from apps.dropoff.views import DatadropViewSet +from apps.invitation.views import InvitationViewSet +from apps.researcher.views import ResearcherViewSet +from apps.storage.views import StorageEngineViewSet, StorageLocationViewSet +from apps.study.views import StudyViewSet +from apps.virtual_machine.views import (VirtualMachineViewSet, + VirtualMachineOperatingSystemViewSet, + VirtualMachineProfileViewSet, + VirtualMachineMemoryViewSet, + VirtualMachineNetworkViewSet, + VirtualMachineStorageViewSet, + VirtualMachineGPUViewSet, + VirtualMachineAccessViewSet) + +schema_view = get_schema_view( + openapi.Info( + title="Virtual Research Environment API", + default_version='v1', + description="Here you can see a list of API endpoints and actions that are available to communicate with the VRE API", + terms_of_service="https://www.rug.nl", + contact=openapi.Contact(email="vre_team@rug.nl"), + license=openapi.License(name="MIT License"), + ), + public=True, + permission_classes=(permissions.AllowAny,), +) + +api_router_v1 = routers.DefaultRouter() + +api_router_v1.register(r'researchers', ResearcherViewSet) + +api_router_v1.register(r'studies', 
StudyViewSet) + +api_router_v1.register(r'dropoffs', DatadropViewSet) + +api_router_v1.register(r'invitations', InvitationViewSet) + +api_router_v1.register(r'storageengines', StorageEngineViewSet) +api_router_v1.register(r'storagelocations', StorageLocationViewSet) + +# Order is important for virtual machines. Longest match first +api_router_v1.register(r'virtualmachines/profiles', VirtualMachineProfileViewSet) +api_router_v1.register(r'virtualmachines/storage', VirtualMachineStorageViewSet) +api_router_v1.register(r'virtualmachines/access', VirtualMachineAccessViewSet) +api_router_v1.register(r'virtualmachines/memory', VirtualMachineMemoryViewSet) +api_router_v1.register(r'virtualmachines/network', VirtualMachineNetworkViewSet) +api_router_v1.register(r'virtualmachines/gpu', VirtualMachineGPUViewSet) +api_router_v1.register(r'virtualmachines/os', VirtualMachineOperatingSystemViewSet) +api_router_v1.register(r'virtualmachines', VirtualMachineViewSet) + +# Main namespace for the API urls +app_name = 'api' +urlpatterns = [ + re_path(r'^swagger(?P\.json|\.yaml)$', schema_view.without_ui(cache_timeout=0), name='schema-json'), + path('swagger/', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'), + path('redoc/', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'), + + # Authentication urls + path('auth/', include('djoser.urls')), + path('auth/login/', views.Login.as_view(), name='api-login'), + + # Extra /api/info path for checking if the Hawk authentication is working. 
+ # Also this will give the full url to the OpenAPI documentation + path('info/', views.Info.as_view(), name='api-info'), + + # Add extra namespace for versioning the API + path('v1/', include((api_router_v1.urls,'api'),namespace='v1')), +] \ No newline at end of file diff --git a/VRE/apps/api/views.py b/VRE/apps/api/views.py new file mode 100644 index 0000000..9e2ec61 --- /dev/null +++ b/VRE/apps/api/views.py @@ -0,0 +1,111 @@ +from rest_framework.views import APIView +from rest_framework.response import Response +from rest_framework.decorators import schema +from rest_framework.permissions import AllowAny +from django.urls import reverse + +from django.contrib.auth.models import User +from rest_framework import exceptions + +from hawkrest import HawkAuthentication + +from lib.utils.general import get_ip_address, generate_encryption_key + +from drf_yasg.utils import swagger_auto_schema +from drf_yasg import openapi + +from .serializers import TokenSerializer, TokenLoginSerializer + +class Login(APIView): + """This will let you login to the REST API. Login with your username and password, and get a key and a secret back that is used for further communication using HAWK signing of requests (https://github.com/hapijs/hawk). 
+ + Args: + username (string): The username to login with + password (string): The password to for the login + + Raises: + exceptions.AuthenticationFailed: An error will be given when the login failed + + Returns: + Token: A token that holds a key and a secret to authenticate the other requests to this API + """ + + authentication_classes = [] + permission_classes = [AllowAny] + + token_response = openapi.Response('response description', TokenSerializer) + @swagger_auto_schema(request_body=TokenLoginSerializer, responses={200: token_response}, security=authentication_classes) + def post(self, request, format=None): + login_data = TokenLoginSerializer(data=request.data) + + if not login_data.is_valid(): + raise exceptions.AuthenticationFailed(f'Invalid form data posted') + + username = login_data.validated_data['username'] + password = login_data.validated_data['password'] + + try: + user = User.objects.get(username=username) + if user.check_password(password): + # Refresh tokens....? + # user.token.key = generate_encryption_key(8) + # user.token.secret = generate_encryption_key(32) + # user.token.save() + + # Return the key and secret to use with all the other REST calls + return Response({'key' : user.token.key, 'secret' : user.token.secret}) + else: + raise exceptions.AuthenticationFailed(f'Login failed for user {username}') + + except User.DoesNotExist: + raise exceptions.AuthenticationFailed(f'Login failed for user {username}') + + raise exceptions.AuthenticationFailed(f'Login failed for user {username}') + +@schema(None) +class Info(APIView): + """ + Show some API information. Also this can be used to check if the Hawk credentials are working. + + If you used your Hawk credentials, you should see here your account information. 
+ """ + + authentication_classes = [] + permission_classes = [AllowAny] + + def get(self, request, format=None): + """ + Default API get action will return the following information in a dict: + + - Connected user + - Used authentication scheme + - The remote IP of the connection + - The used content type + - The full url to the API documentation (OpenAPI) + - If a super token is used + """ + + data = { + 'type' : 'anonymous', + 'auth' : 'none', + 'remote_ip' : get_ip_address(request), + 'content_type' : request.content_type, + 'openapi' : request.build_absolute_uri(reverse('api:schema-redoc')), + } + + if request.user.is_authenticated: + + data['user'] = str(request.user.username) + data['type'] = 'authenticated' + data['auth'] = str(request.successful_authenticator) + + if request.user.token.is_supertoken: + data['type'] = 'supertoken' + else: + try: + assert request.user.researcher + data['type'] = 'researcher' + except AttributeError: + pass + + return Response(data) \ No newline at end of file diff --git a/VRE/apps/dropoff/__init__.py b/VRE/apps/dropoff/__init__.py new file mode 100644 index 0000000..edb38da --- /dev/null +++ b/VRE/apps/dropoff/__init__.py @@ -0,0 +1 @@ +default_app_config = 'apps.dropoff.apps.DropoffConfig' \ No newline at end of file diff --git a/VRE/apps/dropoff/admin.py b/VRE/apps/dropoff/admin.py new file mode 100644 index 0000000..383c4de --- /dev/null +++ b/VRE/apps/dropoff/admin.py @@ -0,0 +1,19 @@ +from django.contrib import admin +from django.template.defaultfilters import filesizeformat + +from .models import DataDrop + +# Register your models here. 
@admin.register(DataDrop)
class DataDropAdmin(admin.ModelAdmin):
    """Read-only admin for uploaded data drops.

    Files enter the system through the upload API, never through the admin, so
    adding records here is disabled and all fields are read only.
    """

    list_display = ('original_filename', '_filesize', 'created_at', 'study', 'uploader')
    ordering = ('-created_at', 'original_filename')
    search_fields = ('original_filename', 'study__name')
    readonly_fields = ('study', 'original_filename', 'upload_filename', 'filepath', 'uploader', '_filesize', 'ip', 'created_at')

    def _filesize(self, obj):
        """Render the stored byte count as a human readable size (0 for unsaved rows)."""
        return filesizeformat(0 if obj.pk is None else obj.filesize)

    # Give the computed column a readable header instead of the auto-derived one.
    _filesize.short_description = 'Filesize'

    # This disables the add functionality. It is not possible to upload files through the admin.
    def has_add_permission(self, request):
        return False
+# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-02-19 11:19+0100\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: apps/dropoff/apps.py:7 +msgid "Dropoff" +msgstr "" + +#: apps/dropoff/apps.py:8 +msgid "Dropoffs" +msgstr "" + +#: apps/dropoff/models.py:22 +msgid "New" +msgstr "" + +#: apps/dropoff/models.py:23 +msgid "Uploading" +msgstr "" + +#: apps/dropoff/models.py:24 +msgid "Uploaded" +msgstr "" + +#: apps/dropoff/models.py:25 +msgid "Moving" +msgstr "" + +#: apps/dropoff/models.py:26 +msgid "Done" +msgstr "" + +#: apps/dropoff/models.py:28 +msgid "Error" +msgstr "" + +#: apps/dropoff/models.py:61 +msgid "datadrop" +msgstr "" + +#: apps/dropoff/models.py:62 +msgid "datadrops" +msgstr "" + +#: apps/dropoff/models.py:64 +msgid "The study where this data drop belongs to." +msgstr "" + +#: apps/dropoff/models.py:65 +msgid "Original filename" +msgstr "" + +#: apps/dropoff/models.py:65 +msgid "The original filename as the uploader has uploaded it." +msgstr "" + +#: apps/dropoff/models.py:66 +msgid "Uploaded unique filename" +msgstr "" + +#: apps/dropoff/models.py:66 +msgid "A unique filename that is used to store on disc." +msgstr "" + +#: apps/dropoff/models.py:67 +msgid "Filesize" +msgstr "" + +#: apps/dropoff/models.py:67 +msgid "Filesize of the upload." +msgstr "" + +#: apps/dropoff/models.py:69 +msgid "Filepath" +msgstr "" + +#: apps/dropoff/models.py:70 +msgid "IP address" +msgstr "" + +#: apps/dropoff/models.py:70 +msgid "Ip address of the uploader." +msgstr "" + +#: apps/dropoff/models.py:71 +msgid "Encrypted" +msgstr "" + +#: apps/dropoff/models.py:71 +msgid "Is the file encrypted during upload." 
+msgstr "" + +#: apps/dropoff/models.py:72 +msgid "Uploader" +msgstr "" + +#: apps/dropoff/models.py:72 +msgid "" +"The uploader / invitation that has uploaded the file. When empty, the " +"researcher has uploaded the file." +msgstr "" + +#: apps/dropoff/models.py:73 +msgid "Status" +msgstr "" + +#: apps/dropoff/models.py:73 +msgid "Datadrop status." +msgstr "" + +#: apps/dropoff/models.py:74 +msgid "Status message" +msgstr "" + +#: apps/dropoff/models.py:74 +msgid "Datadrop status message." +msgstr "" diff --git a/VRE/apps/dropoff/locale/nl/LC_MESSAGES/django.po b/VRE/apps/dropoff/locale/nl/LC_MESSAGES/django.po new file mode 100644 index 0000000..7763ffa --- /dev/null +++ b/VRE/apps/dropoff/locale/nl/LC_MESSAGES/django.po @@ -0,0 +1,133 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-02-19 11:19+0100\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: apps/dropoff/apps.py:7 +msgid "Dropoff" +msgstr "" + +#: apps/dropoff/apps.py:8 +msgid "Dropoffs" +msgstr "" + +#: apps/dropoff/models.py:22 +msgid "New" +msgstr "" + +#: apps/dropoff/models.py:23 +msgid "Uploading" +msgstr "" + +#: apps/dropoff/models.py:24 +msgid "Uploaded" +msgstr "" + +#: apps/dropoff/models.py:25 +msgid "Moving" +msgstr "" + +#: apps/dropoff/models.py:26 +msgid "Done" +msgstr "" + +#: apps/dropoff/models.py:28 +msgid "Error" +msgstr "" + +#: apps/dropoff/models.py:61 +msgid "datadrop" +msgstr "" + +#: apps/dropoff/models.py:62 +msgid "datadrops" +msgstr "" + +#: apps/dropoff/models.py:64 +msgid "The study 
where this data drop belongs to." +msgstr "" + +#: apps/dropoff/models.py:65 +msgid "Original filename" +msgstr "" + +#: apps/dropoff/models.py:65 +msgid "The original filename as the uploader has uploaded it." +msgstr "" + +#: apps/dropoff/models.py:66 +msgid "Uploaded unique filename" +msgstr "" + +#: apps/dropoff/models.py:66 +msgid "A unique filename that is used to store on disc." +msgstr "" + +#: apps/dropoff/models.py:67 +msgid "Filesize" +msgstr "" + +#: apps/dropoff/models.py:67 +msgid "Filesize of the upload." +msgstr "" + +#: apps/dropoff/models.py:69 +msgid "Filepath" +msgstr "" + +#: apps/dropoff/models.py:70 +msgid "IP address" +msgstr "" + +#: apps/dropoff/models.py:70 +msgid "Ip address of the uploader." +msgstr "" + +#: apps/dropoff/models.py:71 +msgid "Encrypted" +msgstr "" + +#: apps/dropoff/models.py:71 +msgid "Is the file encrypted during upload." +msgstr "" + +#: apps/dropoff/models.py:72 +msgid "Uploader" +msgstr "" + +#: apps/dropoff/models.py:72 +msgid "" +"The uploader / invitation that has uploaded the file. When empty, the " +"researcher has uploaded the file." +msgstr "" + +#: apps/dropoff/models.py:73 +msgid "Status" +msgstr "" + +#: apps/dropoff/models.py:73 +msgid "Datadrop status." +msgstr "" + +#: apps/dropoff/models.py:74 +msgid "Status message" +msgstr "" + +#: apps/dropoff/models.py:74 +msgid "Datadrop status message." 
+msgstr "" diff --git a/VRE/apps/dropoff/migrations/0001_initial.py b/VRE/apps/dropoff/migrations/0001_initial.py new file mode 100644 index 0000000..badbddf --- /dev/null +++ b/VRE/apps/dropoff/migrations/0001_initial.py @@ -0,0 +1,39 @@ +# Generated by Django 3.1.7 on 2021-02-23 14:37 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('study', '0001_initial'), + ('invitation', '0001_initial'), + ] + + operations = [ + migrations.CreateModel( + name='DataDrop', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='The date and time this model has been created', verbose_name='Date created')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='The date and time this model has been updated', verbose_name='Date updated')), + ('original_filename', models.CharField(help_text='The original filename as the uploader has uploaded it.', max_length=1024, verbose_name='Original filename')), + ('upload_filename', models.CharField(help_text='A unique filename that is used to store on disc.', max_length=1024, verbose_name='Uploaded unique filename')), + ('filesize', models.BigIntegerField(editable=False, help_text='Filesize of the upload.', verbose_name='Filesize')), + ('filepath', models.CharField(editable=False, max_length=1024, verbose_name='Filepath')), + ('ip', models.GenericIPAddressField(editable=False, help_text='Ip address of the uploader.', null=True, unpack_ipv4=True, verbose_name='IP address')), + ('encrypted', models.BooleanField(default=False, editable=False, help_text='Is the file encrypted during upload.', verbose_name='Encrypted')), + ('status', models.CharField(choices=[('NEW', 'New'), ('UPLOADING', 'Uploading'), ('UPLOADED', 'Uploaded'), ('MOVING', 'Moving'), ('DONE', 'Done'), ('ERROR', 'Error')], default='NEW', 
help_text='Datadrop status.', max_length=10, verbose_name='Status')), + ('status_message', models.TextField(blank=True, help_text='Datadrop status message.', null=True, verbose_name='Status message')), + ('study', models.ForeignKey(help_text='The study where this data drop belongs to.', on_delete=django.db.models.deletion.CASCADE, related_name='files', to='study.study', verbose_name='studie')), + ('uploader', models.ForeignKey(help_text='The uploader / invitation that has uploaded the file. When empty, the researcher has uploaded the file.', null=True, on_delete=django.db.models.deletion.CASCADE, to='invitation.invitation', verbose_name='Uploader')), + ], + options={ + 'verbose_name': 'datadrop', + 'verbose_name_plural': 'datadrops', + }, + ), + ] diff --git a/VRE/apps/dropoff/migrations/__init__.py b/VRE/apps/dropoff/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/VRE/apps/dropoff/models.py b/VRE/apps/dropoff/models.py new file mode 100644 index 0000000..b00d1ae --- /dev/null +++ b/VRE/apps/dropoff/models.py @@ -0,0 +1,83 @@ +from django.db import models +from django.template.defaultfilters import filesizeformat +from django.utils.translation import gettext_lazy as _ + +from lib.models.base import MetaDataModel +from apps.invitation.models import Invitation +from apps.study.models import Study +from apps.researcher.models import Researcher + +class DataDropStatus(models.TextChoices): + """Data drop statusses. This will hold all the available datadrop statusses as an enum value. + + Valid status values: + - New + - Uploading + - Uploaded + - Moving + - Done + - Error + """ + + NEW = ('NEW', _('New')) + UPLOADING = ('UPLOADING', _('Uploading')) + UPLOADED = ('UPLOADED', _('Uploaded')) + MOVING = ('MOVING', _('Moving')) + DONE = ('DONE', _('Done')) + + ERROR = ('ERROR', _('Error')) + +class DataDrop(MetaDataModel): + """ + A model off an upload action. This is the actual upload / datadrop that has been done. 
+ + It will inherit the attributes :attr:`~lib.models.base.MetaDataModel.created_at` and :attr:`~lib.models.base.MetaDataModel.updated_at` from the Abstract model :class:`~lib.models.base.MetaDataModel` + + Attributes + ---------- + study : Study + The study to which this upload belongs to. An upload can only belong to a single study. + original_filename : string + The original filename when the file was uploaded. Max length is 1024 chars. + upload_filename : string + The uploadname created by the upload server. This is a unique filename. Max length is 1024 chars. + filesize : integer + The filesize of the uploaded file. + filepath : string + The upload path of the file. At the moment this is not yet used. + ip : string + The IP address from where the upload came from. + encrypted : boolean + Checked if the upload is encrypted. At the moment this is not yet used + uploader : Invitation + The user or data provider that has uploaded this file. + status : :class:`~DataDropStatus` + The status type of this datadrop + status_message : string + A human readable status message of the status of this datadrop + """ + + class Meta: + verbose_name = _('datadrop') + verbose_name_plural = _('datadrops') + + study = models.ForeignKey(Study, verbose_name=Study._meta.verbose_name, on_delete=models.CASCADE, related_name='files', help_text=_('The study where this data drop belongs to.')) + original_filename = models.CharField(_('Original filename'),max_length=1024,help_text=_('The original filename as the uploader has uploaded it.')) + upload_filename = models.CharField(_('Uploaded unique filename'),max_length=1024,help_text=_('A unique filename that is used to store on disc.')) + filesize = models.BigIntegerField(_('Filesize'),editable=False,help_text=_('Filesize of the upload.')) + # TODO: Do something with this filepath variable. At the moment we do not use it. 
+ filepath = models.CharField(_('Filepath'),max_length=1024,editable=False) + ip = models.GenericIPAddressField(_('IP address'),protocol='both', unpack_ipv4=True, null=True, editable=False, help_text=_('Ip address of the uploader.')) + encrypted = models.BooleanField(_('Encrypted'),default=False,editable=False,help_text=_('Is the file encrypted during upload.')) + uploader = models.ForeignKey(Invitation,verbose_name=_('Uploader'), null=True, on_delete=models.CASCADE,help_text=_('The uploader / invitation that has uploaded the file. When empty, the researcher has uploaded the file.')) + status = models.CharField(_('Status'), max_length=10, choices=DataDropStatus.choices, default=DataDropStatus.NEW, help_text=_('Datadrop status.')) + status_message = models.TextField(_('Status message'), blank=True, null=True, help_text=_('Datadrop status message.')) + + @property + def researcher(self): + # Get the researcher for this datadrop through the study model. + # TODO: This should be speed up with a related query manager + return self.study.researcher + + def __str__(self): + return f'{self.original_filename} ({filesizeformat(self.filesize)})' diff --git a/VRE/apps/dropoff/serializers.py b/VRE/apps/dropoff/serializers.py new file mode 100644 index 0000000..d0936b1 --- /dev/null +++ b/VRE/apps/dropoff/serializers.py @@ -0,0 +1,18 @@ +from rest_framework import serializers +from apps.dropoff.models import DataDrop + +from apps.invitation.serializers import InvitationSerializer + +from lib.api.base import BaseHyperlinkedModelSerializer + +class DataDropSerializer(BaseHyperlinkedModelSerializer): + + uploader = InvitationSerializer(read_only=True) + + class Meta: + model = DataDrop + exclude = ['filepath'] + extra_kwargs = { + 'url' : {'view_name': 'api:v1:datadrop-detail'}, + 'study' : {'view_name': 'api:v1:study-detail'}, + } diff --git a/VRE/apps/dropoff/tests.py b/VRE/apps/dropoff/tests.py new file mode 100644 index 0000000..7ce503c --- /dev/null +++ 
from django.conf import settings
from django.shortcuts import get_object_or_404
from django.http import JsonResponse, HttpResponse
from rest_framework.decorators import action
import json
# TODO: Replace with Pathlib
import os

from .serializers import DataDropSerializer
from .models import DataDrop, DataDropStatus
from apps.study.models import Study
from apps.invitation.models import Invitation

from drf_yasg.utils import swagger_auto_schema


from lib.api.base import BaseReadOnlyViewSet

class DatadropViewSet(BaseReadOnlyViewSet):
    """
    API endpoint for listing data drops. This is a readonly endpoint.

    The extra ``webhook`` action is called by the TUSD upload daemon. TUSD
    posts a JSON document describing the upload; the fields used here are::

        {
          "Upload": {
            "ID": "<unique upload id>",
            "Size": <filesize in bytes>,
            "MetaData": {
              "code": "<upload code>",
              "filename" / "name": "<original filename>",
              "ip": "<uploader ip address>",
              "study": "<study upload uuid>"
            }
          },
          "error": 0 | 1,
          "done": 0 | 1
        }

    The hook name itself (for example ``pre-create`` or ``post-finish``) is
    passed in the ``Hook-Name`` HTTP request header.
    """

    queryset = DataDrop.objects.all().order_by('-created_at')
    serializer_class = DataDropSerializer

    @action(methods=['POST'],detail=False)
    @swagger_auto_schema(operation_description="Special webhook url for TUSD communication")
    #@swagger_auto_schema(request_body=TokenLoginSerializer, responses={200: token_response}, security=authentication_classes)
    def webhook(self, request):
        """Process a TUSD webhook call.

        Validates the hook header, the JSON payload, the uploaded filename,
        the study UUID and the upload code. On the ``post-finish`` hook a
        :class:`~apps.dropoff.models.DataDrop` record is created/updated and
        a JSON document describing the storage destination(s) is returned so
        TUSD knows where to move the uploaded file to.

        Every validation failure answers HTTP 412 (precondition failed).
        """
        # TODO: Add extra check if the logged in user is actually the right
        # user. So make a check based on a username that is configurable in
        # the settings file.
        webhook_trigger = request.META.get('HTTP_HOOK_NAME', None)
        # If there is no trigger hook specified, we expect that this is not a
        # legal/valid request. Stop processing.
        if webhook_trigger is None:
            # https://en.wikipedia.org/wiki/List_of_HTTP_status_codes
            print('Wrong webhook trigger')
            return HttpResponse(status=412)

        # Parse the request body. It must be valid JSON *and* it must carry
        # the upload metadata, otherwise we cannot process it any further.
        webhook_data = {}
        try:
            # Parse JSON test
            webhook_data = json.loads(request.body)
            # Parse Data test
            if webhook_data['Upload'].get('MetaData', None) is None:
                raise ValueError('Missing Upload.MetaData in webhook payload')
        except Exception as ex:
            # No valid JSON data, so we cannot process further
            print('Parsing JSON data error!')
            print(ex)
            # https://en.wikipedia.org/wiki/List_of_HTTP_status_codes
            return HttpResponse(status=412)

        # The original filename is stored under 'name' or 'filename',
        # depending on the TUSD client that performed the upload.
        original_name = webhook_data['Upload']['MetaData'].get('name', None)
        if original_name is None:
            original_name = webhook_data['Upload']['MetaData'].get('filename', None)

        if original_name is None:
            print('Original Filename is not specified... cannot continue')
            return HttpResponse(status=412)

        # Reject files whose extension is on the configured blocklist
        # (compared case-insensitively, without the leading dot).
        extension = os.path.splitext(original_name)[1][1:]
        if extension.lower() in settings.DROPOFF_NOT_ALLOWED_EXTENSIONS:
            print(f'Invalid extension in upload file: {original_name}')
            return HttpResponse(status=412)

        # Here we check if the study exists based on the UUID in the url.
        # This is set by NGINX.
        study = get_object_or_404(Study, upload_uuid=webhook_data['Upload']['MetaData'].get('study', None))

        # Build the list of valid upload codes: the codes of all invitations
        # plus the study's own code. At this point we only need to know that
        # the code is valid, not yet *who* is uploading.
        valid_upload_codes = list(study.invitations.values_list('upload_code', flat=True))
        valid_upload_codes.append(study.upload_code)

        # Check the upload code that is posted with the data drop.
        upload_code = webhook_data['Upload']['MetaData'].get('code', None)
        if upload_code is None or upload_code not in valid_upload_codes:
            print(f'Upload code does not match study code ({upload_code})')
            # https://en.wikipedia.org/wiki/List_of_HTTP_status_codes
            return HttpResponse(status=412)

        # For the 'pre-create' hook the validation above is all that is
        # needed; the DataDrop record is only created when the upload has
        # finished, not earlier.

        json_response = None
        if 'post-finish' == webhook_trigger:
            upload_id = webhook_data['Upload'].get('ID', None)
            ip = webhook_data['Upload']['MetaData'].get('ip', None)

            # When the upload code belongs to an invitation we know the
            # uploader; otherwise the researcher uploaded the file themselves
            # and we just continue with uploader = None.
            uploader = None
            try:
                uploader = Invitation.objects.get(upload_code=upload_code)
            except Invitation.DoesNotExist:
                pass

            try:
                uploaded_file = DataDrop.objects.get(study=study, upload_filename=upload_id)
            except DataDrop.DoesNotExist:
                # First webhook call for this upload: create the record.
                uploaded_file = DataDrop(study=study,
                                         original_filename=original_name,
                                         upload_filename=upload_id,
                                         filesize=webhook_data['Upload']['Size'],
                                         filepath='',
                                         ip=ip,
                                         uploader=uploader)
                uploaded_file.save()

            if 1 == webhook_data.get('error', 0):
                uploaded_file.status = DataDropStatus.ERROR
                uploaded_file.status_message = webhook_data.get('error_message', None)
                uploaded_file.save()

            if 1 == webhook_data.get('done', 0):
                uploaded_file.status = DataDropStatus.DONE
                uploaded_file.save()

            # Return back a JSON list with storages where to send the file
            # next. This is done with the webhook of TUS.
            # TODO: Make this WAAAAYYYYYY better. This is pretty poor programming :P
            # NOTE(review): this response contains the storage credentials in
            # plain text; confirm that only TUSD can reach this endpoint.
            json_response = {'storages': []}
            json_response['storages'].append(
                {'engine': study.storagelocation.storageengine.engine,
                 'location': study.storagelocation.storageengine.location,
                 'username': study.storagelocation.storageengine.username,
                 'password': study.storagelocation.storageengine.password,
                 'path': study.storagelocation.path,
                 # Enabling encryption is done by passing an encryption
                 # password. If empty, encryption is disabled.
                 'encryption_password': study.storagelocation.encryption_password if study.storagelocation.encrypted() else ''})

            if uploader is not None:
                json_response['uploader_name'] = uploader.name
                json_response['uploader_email'] = uploader.email

        # NOTE(review): for hooks other than 'post-finish' this responds with
        # the JSON body "null" — presumably intentional; confirm with TUSD.
        return JsonResponse(json_response, safe=False)
import signals \ No newline at end of file diff --git a/VRE/apps/invitation/forms.py b/VRE/apps/invitation/forms.py new file mode 100644 index 0000000..fb891ff --- /dev/null +++ b/VRE/apps/invitation/forms.py @@ -0,0 +1,10 @@ +from django import forms +from django.utils.translation import gettext_lazy as _ + +class InvitationForm(forms.Form): + """ + A form for sending invitations. + """ + id = forms.IntegerField(widget=forms.HiddenInput, required=False) + name = forms.CharField(label=_('Receiver name'), max_length=100, help_text=_('Enter the name of the recipient')) + email = forms.EmailField(label=_('Receiver email address'), help_text=_('Enter the email address')) \ No newline at end of file diff --git a/VRE/apps/invitation/locale/en/LC_MESSAGES/django.po b/VRE/apps/invitation/locale/en/LC_MESSAGES/django.po new file mode 100644 index 0000000..faade9e --- /dev/null +++ b/VRE/apps/invitation/locale/en/LC_MESSAGES/django.po @@ -0,0 +1,87 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. 
+# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-02-19 11:19+0100\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: apps/invitation/apps.py:7 +msgid "Invitation" +msgstr "" + +#: apps/invitation/apps.py:8 +msgid "Invitations" +msgstr "" + +#: apps/invitation/forms.py:9 +msgid "Receiver name" +msgstr "" + +#: apps/invitation/forms.py:9 +msgid "Enter the name of the recipient" +msgstr "" + +#: apps/invitation/forms.py:10 +msgid "Receiver email address" +msgstr "" + +#: apps/invitation/forms.py:10 +msgid "Enter the email address" +msgstr "" + +#: apps/invitation/models.py:37 +msgid "invitation" +msgstr "" + +#: apps/invitation/models.py:38 +msgid "invitations" +msgstr "" + +#: apps/invitation/models.py:45 +msgid "The study where this invitation belongs to." +msgstr "" + +#: apps/invitation/models.py:46 +msgid "Name" +msgstr "" + +#: apps/invitation/models.py:46 +msgid "The name of the uploader / data provider." +msgstr "" + +#: apps/invitation/models.py:47 +msgid "Email address" +msgstr "" + +#: apps/invitation/models.py:47 +msgid "The email address of the uploader / data provider." +msgstr "" + +#: apps/invitation/models.py:48 +msgid "Upload code" +msgstr "" + +#: apps/invitation/models.py:48 +msgid "A unique upload code. Will be generated when a new study is saved." +msgstr "" + +#: apps/invitation/models.py:49 +msgid "Date mailed" +msgstr "" + +#: apps/invitation/models.py:49 +msgid "The date when the last invitation is send." 
+msgstr "" diff --git a/VRE/apps/invitation/locale/nl/LC_MESSAGES/django.po b/VRE/apps/invitation/locale/nl/LC_MESSAGES/django.po new file mode 100644 index 0000000..8296d9e --- /dev/null +++ b/VRE/apps/invitation/locale/nl/LC_MESSAGES/django.po @@ -0,0 +1,93 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. +# +msgid "" +msgstr "" +"Project-Id-Version: \n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-02-19 11:19+0100\n" +"PO-Revision-Date: 2020-05-27 16:25+0200\n" +"Last-Translator: Joshua Rubingh \n" +"Language-Team: \n" +"Language: nl\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" +"X-Generator: Poedit 2.0.6\n" + +#: apps/invitation/apps.py:7 +msgid "Invitation" +msgstr "" + +#: apps/invitation/apps.py:8 +msgid "Invitations" +msgstr "" + +#: apps/invitation/forms.py:9 +msgid "Receiver name" +msgstr "" + +#: apps/invitation/forms.py:9 +msgid "Enter the name of the recipient" +msgstr "" + +#: apps/invitation/forms.py:10 +msgid "Receiver email address" +msgstr "" + +#: apps/invitation/forms.py:10 +msgid "Enter the email address" +msgstr "" + +#: apps/invitation/models.py:37 +msgid "invitation" +msgstr "" + +#: apps/invitation/models.py:38 +msgid "invitations" +msgstr "" + +#: apps/invitation/models.py:45 +msgid "The study where this invitation belongs to." +msgstr "" + +#: apps/invitation/models.py:46 +msgid "Name" +msgstr "" + +#: apps/invitation/models.py:46 +msgid "The name of the uploader / data provider." +msgstr "" + +#: apps/invitation/models.py:47 +msgid "Email address" +msgstr "" + +#: apps/invitation/models.py:47 +msgid "The email address of the uploader / data provider." 
+msgstr "" + +#: apps/invitation/models.py:48 +msgid "Upload code" +msgstr "" + +#: apps/invitation/models.py:48 +msgid "A unique upload code. Will be generated when a new study is saved." +msgstr "" + +#: apps/invitation/models.py:49 +msgid "Date mailed" +msgstr "" + +#: apps/invitation/models.py:49 +msgid "The date when the last invitation is send." +msgstr "" + +#~ msgid "API" +#~ msgstr "API" + +#~ msgid "APIs" +#~ msgstr "APIs" diff --git a/VRE/apps/invitation/migrations/0001_initial.py b/VRE/apps/invitation/migrations/0001_initial.py new file mode 100644 index 0000000..8930da5 --- /dev/null +++ b/VRE/apps/invitation/migrations/0001_initial.py @@ -0,0 +1,39 @@ +# Generated by Django 3.1.7 on 2021-02-23 14:37 + +from django.db import migrations, models +import django.db.models.deletion +import lib.utils.general + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('study', '0001_initial'), + ] + + operations = [ + migrations.CreateModel( + name='Invitation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='The date and time this model has been created', verbose_name='Date created')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='The date and time this model has been updated', verbose_name='Date updated')), + ('name', models.CharField(help_text='The name of the uploader / data provider.', max_length=200, verbose_name='Name')), + ('email', models.EmailField(help_text='The email address of the uploader / data provider.', max_length=254, verbose_name='Email address')), + ('upload_code', models.CharField(default=lib.utils.general.get_random_int_value, help_text='A unique upload code. 
Will be generated when a new study is saved.', max_length=20, verbose_name='Upload code')), + ('mail_sent', models.DateTimeField(blank=True, help_text='The date when the last invitation is send.', null=True, verbose_name='Date mailed')), + ('study', models.ForeignKey(help_text='The study where this invitation belongs to.', on_delete=django.db.models.deletion.CASCADE, related_name='invitations', to='study.study', verbose_name='studie')), + ], + options={ + 'verbose_name': 'invitation', + 'verbose_name_plural': 'invitations', + 'ordering': ['name'], + }, + ), + migrations.AddConstraint( + model_name='invitation', + constraint=models.UniqueConstraint(fields=('study', 'email'), name='one_invitation_per_study'), + ), + ] diff --git a/VRE/apps/invitation/migrations/__init__.py b/VRE/apps/invitation/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/VRE/apps/invitation/models.py b/VRE/apps/invitation/models.py new file mode 100644 index 0000000..f7d1f9c --- /dev/null +++ b/VRE/apps/invitation/models.py @@ -0,0 +1,89 @@ +from django.db import models +from apps.study.models import Study +from lib.models.base import MetaDataModel + +from django.utils import timezone +from django.conf import settings + +from django.utils.translation import gettext_lazy as _ +from django.contrib.staticfiles import finders + +from lib.utils.general import get_random_int_value, remove_html_tags +from lib.utils.emails import EmailMultiRelated + +from pathlib import Path + +# Create your models here. +class Invitation(MetaDataModel): + """ + A model to represent an invitation for uploading files. Every invitation has his own unique upload code. + It will inherit the attributes :attr:`~lib.models.base.MetaDataModel.created_at` and :attr:`~lib.models.base.MetaDataModel.updated_at` from the Abstract model :class:`~lib.models.base.MetaDataModel` + + Attributes + ---------- + study : Study + The study to which this invitation belongs to. 
An invitation can only be used for a single study. + name : str + The name of the uploader or data provider. Max length is 200 characters. + email : str + The email address of the uploader or data provider. Will be used to sent the upload information with unique urls and upload codes. + upload_code : str + A unique code that is used as a token for uploading. This is a unique code for this invitation only. This will be auto generated. + mail_sent: datetime + The date and time when the last invitation has been sent. + """ + + class Meta: + verbose_name = _('invitation') + verbose_name_plural = _('invitations') + ordering = ['name'] + + constraints = [ + models. UniqueConstraint(fields=['study', 'email'], name='one_invitation_per_study') + ] + + study = models.ForeignKey(Study, verbose_name=Study._meta.verbose_name, on_delete=models.CASCADE, related_name='invitations', help_text=_('The study where this invitation belongs to.')) + name = models.CharField(_('Name'), max_length=200, help_text=_('The name of the uploader / data provider.')) + email = models.EmailField(_('Email address'),help_text=_('The email address of the uploader / data provider.')) + upload_code = models.CharField(_('Upload code'),max_length=20, default=get_random_int_value, help_text=_('A unique upload code. Will be generated when a new study is saved.')) + mail_sent = models.DateTimeField(_('Date mailed'), blank=True, null=True,help_text=_('The date when the last invitation is send.')) + + def __str__(self): + """str: Returns a readable string for the invitation. Format is [invitation_name] ([invitation_email_address]).""" + return f'{self.name} ({self.email})' + + @property + def researcher(self): + return self.study.researcher + + def send_email(self): + """Send the invitation by email with a nicely HTML formated template.""" + from_email = settings.EMAIL_FROM_ADDRESS + subject = f'Drop-off study \'{self.study.name}\' instructions' + html_message = f'''RUG Logo

Hello {self.name} + +

In order to upload files to study \'{self.study.name}\' please read the following instructions

+ +Go to the following url to start uploading: {self.study.web_upload_url}
+To use with an API use the url: {self.study.api_upload_url}
+
from rest_framework import serializers
# BUG FIX: Invitation is declared in apps.invitation.models; importing it
# from apps.dropoff.models only resolved by accident (dropoff re-imports it).
from apps.invitation.models import Invitation

from lib.api.base import BaseHyperlinkedModelSerializer

class InvitationSerializer(BaseHyperlinkedModelSerializer):
    """Serializer for :class:`~apps.invitation.models.Invitation` objects.

    Exposes all model fields and hyperlinks both the invitation detail view
    and the detail view of the study the invitation belongs to.
    """

    class Meta:
        model = Invitation
        fields = '__all__'

        extra_kwargs = {
            'url' : {'view_name': 'api:v1:invitation-detail'},
            'study' : {'view_name': 'api:v1:study-detail'},
        }
+ """ + if created and instance is not None: + instance.send_email() diff --git a/VRE/apps/invitation/tests.py b/VRE/apps/invitation/tests.py new file mode 100644 index 0000000..7ce503c --- /dev/null +++ b/VRE/apps/invitation/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. diff --git a/VRE/apps/invitation/views.py b/VRE/apps/invitation/views.py new file mode 100644 index 0000000..8d1e333 --- /dev/null +++ b/VRE/apps/invitation/views.py @@ -0,0 +1,53 @@ +from .serializers import InvitationSerializer +from rest_framework import serializers +from rest_framework.response import Response + +from django.template.defaultfilters import date +from django.utils.translation import gettext as _ +from apps.invitation.models import Invitation + +from lib.api.base import BaseViewSet + +from rest_framework.decorators import action + +from drf_yasg.utils import swagger_auto_schema +from drf_yasg import openapi + +class InvitationViewSet(BaseViewSet): + """ + API endpoint for creating/reading/updating/deleting and sending invitations. 
+ """ + + queryset = Invitation.objects.all().order_by('-created_at') + serializer_class = InvitationSerializer + + @action(methods=['PUT'],detail=True) + @swagger_auto_schema(operation_description="Resend the invitation email", request_body=serializers.Serializer) + def send_email(self, request, pk): + data = {'error' : False, + 'message' : ''} + + try: + invitation = Invitation.objects.get(pk=pk, study__in=request.user.researcher.study_set.all()) + + except Invitation.DoesNotExist: + data['error'] = True + data['message'] = _('Invitation does not exists') + + except Exception as ex: + print(ex) + data['error'] = True + data['message'] = _('Unknown error') + + try: + invitation.send_email() + + data['error'] = False + data['message'] = _('Invitation to %(name)s (%(email)s) is sent') % {'name' : invitation.name, 'email': invitation.email} + data['send_date'] = date(invitation.mail_sent,'SHORT_DATE_FORMAT') + except Exception as ex: + print(ex) + data['error'] = True + data['message'] = str(ex) + + return Response(data) \ No newline at end of file diff --git a/VRE/apps/researcher/__init__.py b/VRE/apps/researcher/__init__.py new file mode 100644 index 0000000..4608442 --- /dev/null +++ b/VRE/apps/researcher/__init__.py @@ -0,0 +1 @@ +default_app_config = 'apps.researcher.apps.ResearcherConfig' \ No newline at end of file diff --git a/VRE/apps/researcher/admin.py b/VRE/apps/researcher/admin.py new file mode 100644 index 0000000..e9ef722 --- /dev/null +++ b/VRE/apps/researcher/admin.py @@ -0,0 +1,4 @@ +from django.contrib import admin +from .models import Researcher + +admin.site.register(Researcher) \ No newline at end of file diff --git a/VRE/apps/researcher/apps.py b/VRE/apps/researcher/apps.py new file mode 100644 index 0000000..babb4a9 --- /dev/null +++ b/VRE/apps/researcher/apps.py @@ -0,0 +1,11 @@ +from django.apps import AppConfig +from django.utils.translation import ugettext_lazy as _ + +class ResearcherConfig(AppConfig): + name = 'apps.researcher' + label = 
'researcher' + verbose_name = _('Researcher') + verbose_name_plural = _('Researchers') + + def ready(self): + import apps.researcher.signals \ No newline at end of file diff --git a/VRE/apps/researcher/locale/en/LC_MESSAGES/django.po b/VRE/apps/researcher/locale/en/LC_MESSAGES/django.po new file mode 100644 index 0000000..872f51a --- /dev/null +++ b/VRE/apps/researcher/locale/en/LC_MESSAGES/django.po @@ -0,0 +1,39 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-02-19 11:19+0100\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: apps/researcher/apps.py:7 +msgid "Researcher" +msgstr "" + +#: apps/researcher/apps.py:8 +msgid "Researchers" +msgstr "" + +#: apps/researcher/models.py:29 +msgid "researcher" +msgstr "" + +#: apps/researcher/models.py:30 +msgid "researchers" +msgstr "" + +#: apps/researcher/models.py:33 +msgid "Select the user that has to become a researcher" +msgstr "" diff --git a/VRE/apps/researcher/locale/nl/LC_MESSAGES/django.po b/VRE/apps/researcher/locale/nl/LC_MESSAGES/django.po new file mode 100644 index 0000000..b420c74 --- /dev/null +++ b/VRE/apps/researcher/locale/nl/LC_MESSAGES/django.po @@ -0,0 +1,45 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. 
+# +msgid "" +msgstr "" +"Project-Id-Version: \n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-02-19 11:19+0100\n" +"PO-Revision-Date: 2020-05-27 16:25+0200\n" +"Last-Translator: Joshua Rubingh \n" +"Language-Team: \n" +"Language: nl\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" +"X-Generator: Poedit 2.0.6\n" + +#: apps/researcher/apps.py:7 +msgid "Researcher" +msgstr "" + +#: apps/researcher/apps.py:8 +msgid "Researchers" +msgstr "" + +#: apps/researcher/models.py:29 +msgid "researcher" +msgstr "" + +#: apps/researcher/models.py:30 +msgid "researchers" +msgstr "" + +#: apps/researcher/models.py:33 +msgid "Select the user that has to become a researcher" +msgstr "" + +#~ msgid "API" +#~ msgstr "API" + +#~ msgid "APIs" +#~ msgstr "APIs" diff --git a/VRE/apps/researcher/migrations/0001_initial.py b/VRE/apps/researcher/migrations/0001_initial.py new file mode 100644 index 0000000..2baf276 --- /dev/null +++ b/VRE/apps/researcher/migrations/0001_initial.py @@ -0,0 +1,31 @@ +# Generated by Django 3.1.7 on 2021-02-23 14:37 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name='Researcher', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='The date and time this model has been created', verbose_name='Date created')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='The date and time this model has been updated', verbose_name='Date updated')), + ('user', models.OneToOneField(help_text='Select the user that has to become a researcher', 
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ], + options={ + 'verbose_name': 'researcher', + 'verbose_name_plural': 'researchers', + 'ordering': ['user__last_name'], + }, + ), + ] diff --git a/VRE/apps/researcher/migrations/__init__.py b/VRE/apps/researcher/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/VRE/apps/researcher/models.py b/VRE/apps/researcher/models.py new file mode 100644 index 0000000..4a17af1 --- /dev/null +++ b/VRE/apps/researcher/models.py @@ -0,0 +1,42 @@ +from django.db import models +from django.contrib.auth.models import User +from django.utils.translation import gettext_lazy as _ + +from lib.models.base import MetaDataModel + +# Create your models here. +class ResearcherManager(models.Manager): + """ + Custom queryset which will prefetch related user table data + """ + + def get_queryset(self): + return super(ResearcherManager, self).get_queryset().select_related('user') + + +class Researcher(MetaDataModel): + """ + A model to represent a researcher. This is a One to One field with the Django user model. + It will inherit the attributes :attr:`~lib.models.base.MetaDataModel.created_at` and :attr:`~lib.models.base.MetaDataModel.updated_at` from the Abstract model :class:`~lib.models.base.MetaDataModel` + + Attributes + ---------- + user : :class:`~django.contrib.auth.models.User` + The Django User model that is the researcher. + """ + + class Meta: + verbose_name = _('researcher') + verbose_name_plural = _('researchers') + ordering = ['user__last_name'] + + user = models.OneToOneField(User, on_delete=models.CASCADE, help_text=_('Select the user that has to become a researcher')) + + # Custom manager that will retrieve the related user table as well. + objects = ResearcherManager() + + def __str__(self): + """ + Print the full name of the researcher based on the first and last name fields of the User model. 
+ """ + return self.user.get_full_name() \ No newline at end of file diff --git a/VRE/apps/researcher/serializers.py b/VRE/apps/researcher/serializers.py new file mode 100644 index 0000000..3cec870 --- /dev/null +++ b/VRE/apps/researcher/serializers.py @@ -0,0 +1,35 @@ +from rest_framework import serializers +from apps.researcher.models import Researcher +from lib.api.base import BaseHyperlinkedModelSerializer + +from django.contrib.auth.models import User + +class ResearcherSerializer(BaseHyperlinkedModelSerializer): + # 'Proxy' some data from the user model to our Researcher model. + first_name = serializers.CharField(source='user.first_name') + last_name = serializers.CharField(source='user.last_name') + email_address = serializers.CharField(source='user.email') + # The token data is retrieved by the Django user model. As every new user will get a own token key and secret + token_key = serializers.ReadOnlyField(source='user.token.key') + token_secret = serializers.ReadOnlyField(source='user.token.secret') + + class Meta: + model = Researcher + exclude = ['user'] + extra_kwargs = { + 'url': {'view_name': 'api:v1:researcher-detail'} + } + + def create(self, validated_data): + """ + Create a new researcher by creating a new normal user. The signal of the user creation will create a new researcher. + """ + # Cleaned data field names are based on the model field names. So email_address become email + data = validated_data['user'] + # As we need a username, we set it to the email address of the researcher. This should be unique! + data['username'] = data['email'] + # Create a new user, or load an existing. Ignore the '_' for created True or False + user, _ = User.objects.get_or_create(**data) + # TODO: Check if the user did exists, but not as a researcher. So create a 'new' researcher object for the existing user. + # TODO: Check if the user did exists, but does not have an API token. So create a 'new' token object for the existing user. 
+ return user.researcher \ No newline at end of file diff --git a/VRE/apps/researcher/signals.py b/VRE/apps/researcher/signals.py new file mode 100644 index 0000000..3a165ac --- /dev/null +++ b/VRE/apps/researcher/signals.py @@ -0,0 +1,24 @@ +from django.contrib.auth.models import User +from django.db.models.signals import post_save +from django.dispatch import receiver + +from .models import Researcher + +@receiver(post_save, sender=User) +def create_researcher_profile(sender, instance, created, **kwargs): + """ + | A signal that is fired when a user is created. This will create a researcher model with a link with the newly created user. + + Arguments + ---------- + sender : sender + The model that has triggered the signal + + instance: :attr:`~django.contrib.auth.models.User` + The newly created user model data + + created : boolean + Wether the object was created or updated. When true it is newly created + """ + if created: + Researcher.objects.create(user=instance) diff --git a/VRE/apps/researcher/tests.py b/VRE/apps/researcher/tests.py new file mode 100644 index 0000000..7ce503c --- /dev/null +++ b/VRE/apps/researcher/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. diff --git a/VRE/apps/researcher/views.py b/VRE/apps/researcher/views.py new file mode 100644 index 0000000..c914528 --- /dev/null +++ b/VRE/apps/researcher/views.py @@ -0,0 +1,38 @@ +from rest_framework import viewsets +from rest_framework import permissions +from rest_framework.response import Response + +from .serializers import ResearcherSerializer +from apps.researcher.models import Researcher + +class ResearcherViewSet(viewsets.ModelViewSet): + """ + API endpoint for listing researchers. This is a readonly endpoint ordered by username. 
+ """ + queryset = Researcher.objects.all().order_by('user__last_name') + serializer_class = ResearcherSerializer + permission_classes = [permissions.IsAuthenticated] + + def get_queryset(self): + """ + This view should return a list of all the storages for the currently authenticated user ordered by name. + Or the complete list if a super user is logged in. + """ + if getattr(self, 'swagger_fake_view', False): + return self.queryset + + objects = self.queryset.objects + + try: + # Only show the logged in researcher his storages... + objects = objects.filter(user=self.request.user) + except Exception as ex: + # If it fails, then we check if the logged in user is a super user/token? + # If so, ignore the error, and continue + if not self.request.user.token.is_supertoken: + # Raise the error, something went wrong + raise ex + + # TODO: Check if this select_related is still needed, as we have a custom query manager in the model.... + #objects = objects.select_related('user') + return objects diff --git a/VRE/apps/storage/__init__.py b/VRE/apps/storage/__init__.py new file mode 100644 index 0000000..aeb3ed3 --- /dev/null +++ b/VRE/apps/storage/__init__.py @@ -0,0 +1 @@ +default_app_config = 'apps.storage.apps.StorageConfig' \ No newline at end of file diff --git a/VRE/apps/storage/admin.py b/VRE/apps/storage/admin.py new file mode 100644 index 0000000..380e921 --- /dev/null +++ b/VRE/apps/storage/admin.py @@ -0,0 +1,16 @@ +from django.contrib import admin +from .models import StorageEngine, StorageLocation + +@admin.register(StorageEngine) +class StorageEngineAdmin(admin.ModelAdmin): + list_display = ('name', 'engine', 'researcher', 'created_at') + ordering = ('-created_at', 'name', ) + search_fields = ('name', 'engine', 'researcher') + readonly_fields = ('created_at', 'updated_at') + +@admin.register(StorageLocation) +class StorageLocationAdmin(admin.ModelAdmin): + list_display = ( 'study', 'researcher', 'storageengine', 'encrypted', 'created_at') + ordering = 
('-created_at', 'study__name', ) + search_fields = ( 'study', 'researcher', 'storageengine') + readonly_fields = ('created_at', 'updated_at') \ No newline at end of file diff --git a/VRE/apps/storage/apps.py b/VRE/apps/storage/apps.py new file mode 100644 index 0000000..6a48ce2 --- /dev/null +++ b/VRE/apps/storage/apps.py @@ -0,0 +1,11 @@ +from django.apps import AppConfig +from django.utils.translation import ugettext_lazy as _ + +class StorageConfig(AppConfig): + name = 'apps.storage' + label = 'storage' + verbose_name = _('Storage') + verbose_name_plural = _('Storages') + + def ready(self): + from . import signals \ No newline at end of file diff --git a/VRE/apps/storage/locale/en/LC_MESSAGES/django.po b/VRE/apps/storage/locale/en/LC_MESSAGES/django.po new file mode 100644 index 0000000..ca59a61 --- /dev/null +++ b/VRE/apps/storage/locale/en/LC_MESSAGES/django.po @@ -0,0 +1,148 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. 
+# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-02-19 11:19+0100\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: apps/storage/apps.py:7 +msgid "Storage" +msgstr "" + +#: apps/storage/apps.py:8 +msgid "Storages" +msgstr "" + +#: apps/storage/models.py:41 +msgid "WebDAV" +msgstr "" + +#: apps/storage/models.py:42 +msgid "Gitea" +msgstr "" + +#: apps/storage/models.py:43 +msgid "Github" +msgstr "" + +#: apps/storage/models.py:44 +msgid "iRODS" +msgstr "" + +#: apps/storage/models.py:60 +msgid "Destination" +msgstr "" + +#: apps/storage/models.py:61 +msgid "Source" +msgstr "" + +#: apps/storage/models.py:96 +msgid "storage engine" +msgstr "" + +#: apps/storage/models.py:97 +msgid "storage engines" +msgstr "" + +#: apps/storage/models.py:104 +msgid "The researcher which owns this storage." +msgstr "" + +#: apps/storage/models.py:105 +msgid "Name" +msgstr "" + +#: apps/storage/models.py:105 +msgid "Easy to remember name for this storage." +msgstr "" + +#: apps/storage/models.py:106 +msgid "Engine" +msgstr "" + +#: apps/storage/models.py:106 +msgid "The engine storage type." +msgstr "" + +#: apps/storage/models.py:107 +msgid "Location" +msgstr "" + +#: apps/storage/models.py:107 +msgid "Full location/url where to store/get the data." +msgstr "" + +#: apps/storage/models.py:108 +msgid "Username" +msgstr "" + +#: apps/storage/models.py:108 +msgid "The user name to connect to the storage." +msgstr "" + +#: apps/storage/models.py:109 +msgid "Password" +msgstr "" + +#: apps/storage/models.py:109 +msgid "The password for the user name to connect to the storage." 
+msgstr "" + +#: apps/storage/models.py:117 +msgid "storage location" +msgstr "" + +#: apps/storage/models.py:118 +msgid "storage locations" +msgstr "" + +#: apps/storage/models.py:120 +msgid "Select the study where you need this storage location" +msgstr "" + +#: apps/storage/models.py:122 +msgid "Select the storage engine where to store the data" +msgstr "" + +#: apps/storage/models.py:124 +msgid "Direction" +msgstr "" + +#: apps/storage/models.py:124 +msgid "Is it a 'source' or 'destination'." +msgstr "" + +#: apps/storage/models.py:126 +msgid "Path" +msgstr "" + +#: apps/storage/models.py:126 +msgid "Folder to store the data. Will be created when does not exists." +msgstr "" + +#: apps/storage/models.py:127 +msgid "Encryption password" +msgstr "" + +#: apps/storage/models.py:127 +msgid "" +"Password for encryption the uploaded data. Leave empty to disable encryption." +msgstr "" + +#: apps/storage/models.py:137 +msgid "Encrypted" +msgstr "" diff --git a/VRE/apps/storage/locale/nl/LC_MESSAGES/django.po b/VRE/apps/storage/locale/nl/LC_MESSAGES/django.po new file mode 100644 index 0000000..54f9d06 --- /dev/null +++ b/VRE/apps/storage/locale/nl/LC_MESSAGES/django.po @@ -0,0 +1,154 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. 
+# +msgid "" +msgstr "" +"Project-Id-Version: \n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-02-19 11:19+0100\n" +"PO-Revision-Date: 2020-05-27 16:25+0200\n" +"Last-Translator: Joshua Rubingh \n" +"Language-Team: \n" +"Language: nl\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" +"X-Generator: Poedit 2.0.6\n" + +#: apps/storage/apps.py:7 +msgid "Storage" +msgstr "" + +#: apps/storage/apps.py:8 +msgid "Storages" +msgstr "" + +#: apps/storage/models.py:41 +msgid "WebDAV" +msgstr "" + +#: apps/storage/models.py:42 +msgid "Gitea" +msgstr "" + +#: apps/storage/models.py:43 +msgid "Github" +msgstr "" + +#: apps/storage/models.py:44 +msgid "iRODS" +msgstr "" + +#: apps/storage/models.py:60 +msgid "Destination" +msgstr "" + +#: apps/storage/models.py:61 +msgid "Source" +msgstr "" + +#: apps/storage/models.py:96 +msgid "storage engine" +msgstr "" + +#: apps/storage/models.py:97 +msgid "storage engines" +msgstr "" + +#: apps/storage/models.py:104 +msgid "The researcher which owns this storage." +msgstr "" + +#: apps/storage/models.py:105 +msgid "Name" +msgstr "" + +#: apps/storage/models.py:105 +msgid "Easy to remember name for this storage." +msgstr "" + +#: apps/storage/models.py:106 +msgid "Engine" +msgstr "" + +#: apps/storage/models.py:106 +msgid "The engine storage type." +msgstr "" + +#: apps/storage/models.py:107 +msgid "Location" +msgstr "" + +#: apps/storage/models.py:107 +msgid "Full location/url where to store/get the data." +msgstr "" + +#: apps/storage/models.py:108 +msgid "Username" +msgstr "" + +#: apps/storage/models.py:108 +msgid "The user name to connect to the storage." +msgstr "" + +#: apps/storage/models.py:109 +msgid "Password" +msgstr "" + +#: apps/storage/models.py:109 +msgid "The password for the user name to connect to the storage." 
+msgstr "" + +#: apps/storage/models.py:117 +msgid "storage location" +msgstr "" + +#: apps/storage/models.py:118 +msgid "storage locations" +msgstr "" + +#: apps/storage/models.py:120 +msgid "Select the study where you need this storage location" +msgstr "" + +#: apps/storage/models.py:122 +msgid "Select the storage engine where to store the data" +msgstr "" + +#: apps/storage/models.py:124 +msgid "Direction" +msgstr "" + +#: apps/storage/models.py:124 +msgid "Is it a 'source' or 'destination'." +msgstr "" + +#: apps/storage/models.py:126 +msgid "Path" +msgstr "" + +#: apps/storage/models.py:126 +msgid "Folder to store the data. Will be created when does not exists." +msgstr "" + +#: apps/storage/models.py:127 +msgid "Encryption password" +msgstr "" + +#: apps/storage/models.py:127 +msgid "" +"Password for encryption the uploaded data. Leave empty to disable encryption." +msgstr "" + +#: apps/storage/models.py:137 +msgid "Encrypted" +msgstr "" + +#~ msgid "API" +#~ msgstr "API" + +#~ msgid "APIs" +#~ msgstr "APIs" diff --git a/VRE/apps/storage/migrations/0001_initial.py b/VRE/apps/storage/migrations/0001_initial.py new file mode 100644 index 0000000..506959f --- /dev/null +++ b/VRE/apps/storage/migrations/0001_initial.py @@ -0,0 +1,59 @@ +# Generated by Django 3.1.7 on 2021-02-23 14:37 + +from django.db import migrations, models +import django.db.models.deletion +import django_cryptography.fields +import lib.utils.general + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('study', '0001_initial'), + ('researcher', '0001_initial'), + ] + + operations = [ + migrations.CreateModel( + name='StorageEngine', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='The date and time this model has been created', verbose_name='Date created')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='The date 
and time this model has been updated', verbose_name='Date updated')), + ('name', models.CharField(help_text='Easy to remember name for this storage.', max_length=50, verbose_name='Name')), + ('engine', models.CharField(choices=[('WEBDAV', 'WebDAV'), ('GITEA', 'Gitea'), ('GITHUB', 'Github'), ('IRODS', 'iRODS')], help_text='The engine storage type.', max_length=6, verbose_name='Engine')), + ('location', models.CharField(help_text='Full location/url where to store/get the data.', max_length=1024, verbose_name='Location')), + ('username', django_cryptography.fields.encrypt(models.CharField(help_text='The user name to connect to the storage.', max_length=100, verbose_name='Username'))), + ('password', django_cryptography.fields.encrypt(models.CharField(help_text='The password for the user name to connect to the storage.', max_length=100, verbose_name='Password'))), + ('researcher', models.ForeignKey(help_text='The researcher which owns this storage.', on_delete=django.db.models.deletion.CASCADE, to='researcher.researcher', verbose_name='researcher')), + ], + options={ + 'verbose_name': 'storage engine', + 'verbose_name_plural': 'storage engines', + 'ordering': ['-created_at'], + }, + ), + migrations.CreateModel( + name='StorageLocation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='The date and time this model has been created', verbose_name='Date created')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='The date and time this model has been updated', verbose_name='Date updated')), + ('direction', models.CharField(choices=[('DESTINATION', 'Destination'), ('SOURCE', 'Source')], default='DESTINATION', help_text="Is it a 'source' or 'destination'.", max_length=11, verbose_name='Direction')), + ('path', models.CharField(blank=True, help_text='Folder to store the data. 
Will be created when does not exists.', max_length=1024, verbose_name='Path')), + ('encryption_password', django_cryptography.fields.encrypt(models.CharField(blank=True, default=lib.utils.general.generate_encryption_key, help_text='Password for encryption the uploaded data. Leave empty to disable encryption.', max_length=100, verbose_name='Encryption password'))), + ('storageengine', models.ForeignKey(help_text='Select the storage engine where to store the data', on_delete=django.db.models.deletion.CASCADE, to='storage.storageengine', verbose_name='storage engine')), + ('study', models.OneToOneField(help_text='Select the study where you need this storage location', on_delete=django.db.models.deletion.CASCADE, to='study.study', verbose_name='studie')), + ], + options={ + 'verbose_name': 'storage location', + 'verbose_name_plural': 'storage locations', + }, + ), + migrations.AddConstraint( + model_name='storageengine', + constraint=models.UniqueConstraint(fields=('researcher', 'engine', 'location'), name='unique_engine_location'), + ), + ] diff --git a/VRE/apps/storage/migrations/__init__.py b/VRE/apps/storage/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/VRE/apps/storage/models.py b/VRE/apps/storage/models.py new file mode 100644 index 0000000..af88437 --- /dev/null +++ b/VRE/apps/storage/models.py @@ -0,0 +1,161 @@ +from django.db import models +from django.utils.translation import gettext_lazy as _ + +from django_cryptography.fields import encrypt + +from apps.researcher.models import Researcher +from apps.study.models import Study +from lib.models.base import MetaDataModel + +from lib.utils.general import get_random_int_value, generate_encryption_key + + +# Here we import the different storage engine libraries. TODO: Should this be somehow some external part? Does not look right to do it here... 
+from django.template.defaultfilters import slugify + +from storage.storage import Storage +import storage.exceptions as StorageException + + +class StorageEngineType(models.TextChoices): + """ + A class for defining storage engine types as choices. Currently the following storage engines are supported: + + .. data:: WEBDAV + + WebDAV like servers. This can be all kinds of WebDAV servers including `Owncloud `_ and `Nextcloud `_ + + .. data:: GITEA + + Support for `Gitea `_. + + .. data:: GITHUB + + Support for `Github `_. + + .. data:: IRODS + + Support for `iRODS `_. + """ + + WEBDAV = ('WEBDAV',_('WebDAV')) + GITEA = ('GITEA',_('Gitea')) + GITHUB = ('GITHUB',_('Github')) + IRODS = ('IRODS',_('iRODS')) + +class StorageDirectionType(models.TextChoices): + """ + A class for defining storage directions as choices + + .. data:: DESTINATION + + This indicates that the content needs to pushed to + + .. data:: SOURCE + + | This indicates that the contents needs to be pulled. + | (Not supported yet) + """ + + DESTINATION = ('DESTINATION',_('Destination')) + SOURCE = ('SOURCE',_('Source')) + + +class StorageEngine(MetaDataModel): + """ + | A model for saving the storage for a Study. This model holds all the information that is needed to put the uploaded file to the right storage. + | At the moment the following storage engines are supported: + + * .. data:: StorageEngineType.WEBDAV + + * .. data:: StorageEngineType.GITEA + + * .. data:: StorageEngineType.GITHUB + + * .. data:: StorageEngineType.IRODS + + It will inherit the attributes :attr:`~lib.models.base.MetaDataModel.created_at` and :attr:`~lib.models.base.MetaDataModel.updated_at` from the Abstract model :class:`~lib.models.base.MetaDataModel` + + Attributes + ---------- + study : Researcher + The researcher to which this storage belongs to. + name : str + The name of this storage. Use a easy to remember name for humans. + engine : StorageEngineType + Specify the storage engine where to store or get the data from. 
+ location : str + The full url including protocol and portnumbers if needed. For WebDAV (Owncloud/Nextcloud) also add the root path + username : str + The username that is needed to connect to the storage. This should be a user that is controlled by the study researcher. The data is stored encrypted in the database. + password : str + The password that is needed to connect to the storage. The data is stored encrypted in the database. + """ + + class Meta: + verbose_name = _('storage engine') + verbose_name_plural = _('storage engines') + ordering = ['-created_at'] + + constraints = [ + models.UniqueConstraint(fields=['researcher', 'engine', 'location'], name='unique_engine_location') + ] + + researcher = models.ForeignKey(Researcher, verbose_name=Researcher._meta.verbose_name, on_delete=models.CASCADE, help_text=_('The researcher which owns this storage.')) + name = models.CharField(_('Name'), max_length=50,help_text=_('Easy to remember name for this storage.')) + engine = models.CharField(_('Engine'), max_length=6, choices=StorageEngineType.choices, help_text=_('The engine storage type.')) + location = models.CharField(_('Location'), max_length=1024,help_text=_('Full location/url where to store/get the data.')) + username = encrypt(models.CharField(_('Username'), max_length=100, help_text=_('The user name to connect to the storage.'))) + password = encrypt(models.CharField(_('Password'), max_length=100, help_text=_('The password for the user name to connect to the storage.'))) + + def __str__(self): + """str: Returns a readable string for the storage. 
Format is [storage_name] (storage_engine).""" + return f'{self.name} ({self.get_engine_display()})' + +class StorageLocation(MetaDataModel): + class Meta: + verbose_name = _('storage location') + verbose_name_plural = _('storage locations') + + study = models.OneToOneField(Study, verbose_name=Study._meta.verbose_name, on_delete=models.CASCADE, help_text=_('Select the study where you need this storage location')) + + storageengine = models.ForeignKey(StorageEngine, verbose_name=StorageEngine._meta.verbose_name, on_delete=models.CASCADE, help_text=_('Select the storage engine where to store the data')) + + direction = models.CharField(_('Direction'), max_length=11, choices=StorageDirectionType.choices, default=StorageDirectionType.DESTINATION, help_text=_('Is it a \'source\' or \'destination\'.')) + + path = models.CharField(_('Path'), blank=True, max_length=1024,help_text=_('Folder to store the data. Will be created when does not exists.')) + encryption_password = encrypt(models.CharField(_('Encryption password'), default=generate_encryption_key, max_length=100, blank=True, help_text=_('Password for encryption the uploaded data. 
Leave empty to disable encryption.'))) + + def researcher(self): + return self.study.researcher + + def encrypted(self): + """boolean: Returns true when the storage has encryption enabled.""" + # For now, disable encryption as it gives problems with docker containers + return False + #return '' != self.encryption_password + + encrypted.boolean = True + encrypted.short_description = _('Encrypted') + + def create_default_paths(self): + project_root_folder = slugify(self.study.name) + + # TODO: Make this is setting so that every faculty could have their own project structure + folders_to_make = ['01 Ethics committee application & approval', + '02 Research materials', + '03 Raw data', + '04 Syntax & scripts', + '05 Publication'] + + storage = Storage(self.storageengine.engine, + self.storageengine.location, + self.storageengine.username, + self.storageengine.password) + + for folder in folders_to_make: + storage.create_directories(f'/{project_root_folder}/{slugify(folder)}') + + # Set the Datadrop upload path... 
Should be the Raw data folder + self.path = f'{project_root_folder}/{slugify(folders_to_make[2])}' + self.save() diff --git a/VRE/apps/storage/serializers.py b/VRE/apps/storage/serializers.py new file mode 100644 index 0000000..b9c211b --- /dev/null +++ b/VRE/apps/storage/serializers.py @@ -0,0 +1,27 @@ +from rest_framework import serializers +from apps.storage.models import StorageEngine, StorageLocation + +from lib.api.base import BaseHyperlinkedModelSerializer + +class StorageEngineSerializer(BaseHyperlinkedModelSerializer): + + class Meta: + model = StorageEngine + fields = '__all__' + extra_kwargs = { + 'url' : {'view_name': 'api:v1:storageengine-detail'}, + } + +class StorageLocationSerializer(BaseHyperlinkedModelSerializer): + + encrypted = serializers.ReadOnlyField() + name = serializers.ReadOnlyField(source='storageengine.name') + + class Meta: + model = StorageLocation + fields = '__all__' + extra_kwargs = { + 'url' : {'view_name': 'api:v1:storagelocation-detail'}, + 'study' : {'view_name': 'api:v1:study-detail'}, + 'storageengine' : {'view_name': 'api:v1:storageengine-detail'} + } diff --git a/VRE/apps/storage/signals.py b/VRE/apps/storage/signals.py new file mode 100644 index 0000000..daf1eea --- /dev/null +++ b/VRE/apps/storage/signals.py @@ -0,0 +1,17 @@ +from django.db.models.signals import post_save +from django.dispatch import receiver + +from .models import StorageLocation + +@receiver(post_save, sender=StorageLocation) +def create_initial_storage_paths(sender, instance=None, created=False, **kwargs): + """[summary] + + Args: + sender ([type]): [description] + instance ([type], optional): [description]. Defaults to None. + created (bool, optional): [description]. Defaults to False. 
+ """ + + if created and instance is not None: + instance.create_default_paths() \ No newline at end of file diff --git a/VRE/apps/storage/tests.py b/VRE/apps/storage/tests.py new file mode 100644 index 0000000..7ce503c --- /dev/null +++ b/VRE/apps/storage/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. diff --git a/VRE/apps/storage/views.py b/VRE/apps/storage/views.py new file mode 100644 index 0000000..5d02ed3 --- /dev/null +++ b/VRE/apps/storage/views.py @@ -0,0 +1,24 @@ +from rest_framework import viewsets +from rest_framework import permissions + +from .serializers import StorageEngineSerializer, StorageLocationSerializer +from apps.storage.models import StorageEngine, StorageLocation, StorageDirectionType + +from lib.api.base import BaseViewSet + +class StorageEngineViewSet(BaseViewSet): + """ + API endpoint for creating/reading/updating/deleting storages. + """ + queryset = StorageEngine.objects.all().order_by('name') + serializer_class = StorageEngineSerializer + + def perform_create(self, serializer): + serializer.save(researcher = self.request.user.researcher) + +class StorageLocationViewSet(BaseViewSet): + """ + API endpoint for creating/reading/updating/deleting storages. + """ + queryset = StorageLocation.objects.all().order_by('study__name') + serializer_class = StorageLocationSerializer \ No newline at end of file diff --git a/VRE/apps/study/__init__.py b/VRE/apps/study/__init__.py new file mode 100644 index 0000000..d805ad2 --- /dev/null +++ b/VRE/apps/study/__init__.py @@ -0,0 +1 @@ +default_app_config = 'apps.study.apps.StudyConfig' \ No newline at end of file diff --git a/VRE/apps/study/admin.py b/VRE/apps/study/admin.py new file mode 100644 index 0000000..b60a862 --- /dev/null +++ b/VRE/apps/study/admin.py @@ -0,0 +1,15 @@ +from django.contrib import admin +from django.template.defaultfilters import filesizeformat + +from .models import Study + +# Register your models here. 
+@admin.register(Study) +class StudyAdmin(admin.ModelAdmin): + list_display = ('name', 'researcher', 'total_files', 'total_file_size', 'total_invitations', 'created_at') + ordering = ('-created_at','name', 'researcher') + search_fields = ('name', 'researcher') + readonly_fields = ('upload_uuid', 'api_upload_url', 'total_files', 'total_file_size', 'total_invitations', 'created_at', 'updated_at') + + def total_file_size(self,obj): + return filesizeformat(obj.total_file_size) \ No newline at end of file diff --git a/VRE/apps/study/apps.py b/VRE/apps/study/apps.py new file mode 100644 index 0000000..55f75cc --- /dev/null +++ b/VRE/apps/study/apps.py @@ -0,0 +1,8 @@ +from django.apps import AppConfig +from django.utils.translation import ugettext_lazy as _ + +class StudyConfig(AppConfig): + name = 'apps.study' + label = 'study' + verbose_name = _('Study') + verbose_name_plural = _('Studies') \ No newline at end of file diff --git a/VRE/apps/study/locale/en/LC_MESSAGES/django.po b/VRE/apps/study/locale/en/LC_MESSAGES/django.po new file mode 100644 index 0000000..f2e746e --- /dev/null +++ b/VRE/apps/study/locale/en/LC_MESSAGES/django.po @@ -0,0 +1,71 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. 
+# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-02-19 11:19+0100\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: apps/study/apps.py:7 +msgid "Study" +msgstr "" + +#: apps/study/apps.py:8 +msgid "Studies" +msgstr "" + +#: apps/study/models.py:59 +msgid "studie" +msgstr "" + +#: apps/study/models.py:60 +msgid "studies" +msgstr "" + +#: apps/study/models.py:63 +msgid "Choose the researcher which owns this study" +msgstr "" + +#: apps/study/models.py:64 +msgid "Name" +msgstr "" + +#: apps/study/models.py:64 +msgid "Name of the research study." +msgstr "" + +#: apps/study/models.py:65 +msgid "Description" +msgstr "" + +#: apps/study/models.py:65 +msgid "Enter a short description for this study." +msgstr "" + +#: apps/study/models.py:67 +msgid "Upload code" +msgstr "" + +#: apps/study/models.py:67 +msgid "A unique upload code. Will be generated when a new study is saved." +msgstr "" + +#: apps/study/models.py:68 +msgid "Upload url key" +msgstr "" + +#: apps/study/models.py:68 +msgid "A unique upload url. Will be generated when a new study is saved." +msgstr "" diff --git a/VRE/apps/study/locale/nl/LC_MESSAGES/django.po b/VRE/apps/study/locale/nl/LC_MESSAGES/django.po new file mode 100644 index 0000000..48cb540 --- /dev/null +++ b/VRE/apps/study/locale/nl/LC_MESSAGES/django.po @@ -0,0 +1,77 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. 
+# +msgid "" +msgstr "" +"Project-Id-Version: \n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-02-19 11:19+0100\n" +"PO-Revision-Date: 2020-05-27 16:25+0200\n" +"Last-Translator: Joshua Rubingh \n" +"Language-Team: \n" +"Language: nl\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" +"X-Generator: Poedit 2.0.6\n" + +#: apps/study/apps.py:7 +msgid "Study" +msgstr "" + +#: apps/study/apps.py:8 +msgid "Studies" +msgstr "" + +#: apps/study/models.py:59 +msgid "studie" +msgstr "" + +#: apps/study/models.py:60 +msgid "studies" +msgstr "" + +#: apps/study/models.py:63 +msgid "Choose the researcher which owns this study" +msgstr "" + +#: apps/study/models.py:64 +msgid "Name" +msgstr "" + +#: apps/study/models.py:64 +msgid "Name of the research study." +msgstr "" + +#: apps/study/models.py:65 +msgid "Description" +msgstr "" + +#: apps/study/models.py:65 +msgid "Enter a short description for this study." +msgstr "" + +#: apps/study/models.py:67 +msgid "Upload code" +msgstr "" + +#: apps/study/models.py:67 +msgid "A unique upload code. Will be generated when a new study is saved." +msgstr "" + +#: apps/study/models.py:68 +msgid "Upload url key" +msgstr "" + +#: apps/study/models.py:68 +msgid "A unique upload url. Will be generated when a new study is saved." 
+msgstr "" + +#~ msgid "API" +#~ msgstr "API" + +#~ msgid "APIs" +#~ msgstr "APIs" diff --git a/VRE/apps/study/migrations/0001_initial.py b/VRE/apps/study/migrations/0001_initial.py new file mode 100644 index 0000000..a258d8e --- /dev/null +++ b/VRE/apps/study/migrations/0001_initial.py @@ -0,0 +1,36 @@ +# Generated by Django 3.1.7 on 2021-02-23 14:37 + +from django.db import migrations, models +import django.db.models.deletion +import lib.utils.general +import uuid + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('researcher', '0001_initial'), + ] + + operations = [ + migrations.CreateModel( + name='Study', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='The date and time this model has been created', verbose_name='Date created')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='The date and time this model has been updated', verbose_name='Date updated')), + ('name', models.CharField(help_text='Name of the research study.', max_length=200, verbose_name='Name')), + ('description', models.TextField(blank=True, help_text='Enter a short description for this study.', null=True, verbose_name='Description')), + ('upload_code', models.CharField(default=lib.utils.general.get_random_int_value, editable=False, help_text='A unique upload code. Will be generated when a new study is saved.', max_length=20, verbose_name='Upload code')), + ('upload_uuid', models.UUIDField(default=uuid.uuid4, editable=False, help_text='A unique upload url. 
Will be generated when a new study is saved.', unique=True, verbose_name='Upload url key')), + ('researcher', models.ForeignKey(help_text='Choose the researcher which owns this study', on_delete=django.db.models.deletion.CASCADE, to='researcher.researcher', verbose_name='researcher')), + ], + options={ + 'verbose_name': 'studie', + 'verbose_name_plural': 'studies', + 'ordering': ['name'], + }, + ), + ] diff --git a/VRE/apps/study/migrations/__init__.py b/VRE/apps/study/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/VRE/apps/study/models.py b/VRE/apps/study/models.py new file mode 100644 index 0000000..d34f392 --- /dev/null +++ b/VRE/apps/study/models.py @@ -0,0 +1,111 @@ +from django.conf import settings +from django.db import models +from django.db.models import Count, Sum + +from django.urls import reverse +from django.utils.translation import gettext_lazy as _ +from lib.utils.general import get_random_int_value, generate_encryption_key + +from django_cryptography.fields import encrypt + +from lib.models.base import MetaDataModel +from apps.researcher.models import Researcher + +import uuid + +# Create your models here. +class StudyManager(models.Manager): + """ + This is a custom study manager for getting extra information from a study model like: + - Total files / datadrops attached for a study. + - Total file size of all the data drops for a study. + - Total invitations that have been sent for a study. + """ + + def get_queryset(self): + """ + Returns the queryset with extra fields + - '_total_files', + - '_total_file_size' + - '_total_invitations'. + + This will overrule/alter the existing queryset. 
+ """ + return super(StudyManager, self).get_queryset().select_related('researcher__user').select_related('storagelocation').annotate(_total_files=Count('files', distinct=True), + _total_file_size=Sum('files__filesize', distinct=True), + _total_invitations=Count('invitations', distinct=True), + ) + +class Study(MetaDataModel): + """ + A model to represent a study of a researcher. This study will receive the dropoff files from various sources. + It will inherit the attributes :attr:`~lib.models.base.MetaDataModel.created_at` and :attr:`~lib.models.base.MetaDataModel.updated_at` from the Abstract model :class:`~lib.models.base.MetaDataModel` + + Attributes + ---------- + researcher : Researcher + The Django Researcher model that is the owner of this study. + name : str + The name of the study. Can be entered freely by the researcher. + description : str + A small description what the study is about. This will be used on the upload page and invitation mail. + upload_code : str + A unique code that is used as a token for uploading. This is a general upload code. Every Invitation will get his own upload code as well. This will be auto generated. + upload_uuid : uuid4 + A UUID v4 string that is used to create a unique upload url. This will be auto generated when created. + """ + + class Meta: + verbose_name = _('studie') + verbose_name_plural = _('studies') + ordering = ['name'] + + researcher = models.ForeignKey(Researcher, verbose_name=Researcher._meta.verbose_name, on_delete=models.CASCADE, help_text=_('Choose the researcher which owns this study')) + name = models.CharField(_('Name'), max_length=200, help_text=_('Name of the research study.')) + description = models.TextField(_('Description'), blank=True, null=True, help_text=_('Enter a short description for this study.')) + + upload_code = models.CharField(_('Upload code'), max_length=20, default=get_random_int_value, editable=False, help_text=_('A unique upload code. 
Will be generated when a new study is saved.')) + upload_uuid = models.UUIDField(_('Upload url key'), unique=True, default=uuid.uuid4, editable=False, help_text=_('A unique upload url. Will be generated when a new study is saved.')) + + # Here we load our custom StudyManager so we always have the amount of files, total file size and amount of invitations directly accessable + objects = StudyManager() + + @property + def has_storage(self): + """boolean: Returns true when there is at least one storage location connected to this study""" + #print(dir(self.storagelocation)) + return self.storagelocation is not None + + @property + def get_absolute_url(self): + """str: Returns the full url to the study detail page.""" + return reverse('study:detail', kwargs={'study_id': self.pk}) + + @property + def web_upload_url(self): + """str: Returns the full url for the study upload page through web.""" + return f'{settings.DROPOFF_BASE_URL.strip("/")}/{self.upload_uuid}' + + @property + def api_upload_url(self): + """str: Returns the full url for the study upload API entrypoint.""" + return f'{settings.DROPOFF_UPLOAD_HOST.strip("/")}/files/{self.upload_uuid}/' + + @property + def total_files(self): + """int: Returns the total amount of uploaded files""" + return self._total_files + + @property + def total_file_size(self): + """int: Returns the total upload amount of the uploaded files""" + return 0 if self._total_file_size is None else self._total_file_size + + @property + def total_invitations(self): + """int: Returns the total amount of invitations""" + return self._total_invitations + + def __str__(self): + """str: Returns a readable string for the study. 
Format is [study_name] ([researcher_name]).""" + return f'{self.name} ({self.researcher})' diff --git a/VRE/apps/study/serializers.py b/VRE/apps/study/serializers.py new file mode 100644 index 0000000..45f50c0 --- /dev/null +++ b/VRE/apps/study/serializers.py @@ -0,0 +1,35 @@ +from rest_framework import serializers + +from lib.api.base import BaseHyperlinkedModelSerializer + +from apps.storage.serializers import StorageLocationSerializer +from apps.invitation.serializers import InvitationSerializer +from apps.dropoff.serializers import DataDropSerializer +from apps.study.models import Study + +class StudySerializer(BaseHyperlinkedModelSerializer): + api_upload_url = serializers.ReadOnlyField() + web_upload_url = serializers.ReadOnlyField() + + total_files = serializers.ReadOnlyField() + total_file_size = serializers.ReadOnlyField() + total_invitations = serializers.ReadOnlyField() + + storage = StorageLocationSerializer(read_only=True, source='storagelocation') + invitations = InvitationSerializer(many=True, read_only=True) + files = DataDropSerializer(many=True, read_only=True) + + class Meta: + model = Study + fields = '__all__' + extra_kwargs = { + 'url' : {'view_name': 'api:v1:study-detail'}, + } + +class StudyUploadSerializer(BaseHyperlinkedModelSerializer): + class Meta: + model = Study + fields = ['researcher','api_upload_url','name','description'] + extra_kwargs = { + 'url' : {'view_name': 'api:v1:study-detail'}, + } diff --git a/VRE/apps/study/tests.py b/VRE/apps/study/tests.py new file mode 100644 index 0000000..7ce503c --- /dev/null +++ b/VRE/apps/study/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. 
diff --git a/VRE/apps/study/views.py b/VRE/apps/study/views.py new file mode 100644 index 0000000..7084769 --- /dev/null +++ b/VRE/apps/study/views.py @@ -0,0 +1,24 @@ +from rest_framework.decorators import action +from rest_framework.response import Response + +from .serializers import StudySerializer, StudyUploadSerializer +from apps.study.models import Study + +from lib.api.base import BaseViewSet + +class StudyViewSet(BaseViewSet): + """ + API endpoint for creating/reading/updating/deleting studies. + """ + queryset = Study.objects.all().order_by('name') + serializer_class = StudySerializer + + def perform_create(self, serializer): + serializer.save(researcher = self.request.user.researcher) + + # TODO: This should be able to access without authentication. So that means take it out the current StudyViewSet class and make a seperate class for it.... + @action(detail=True) + def upload(self, request, pk=None): + study = Study.objects.get(upload_uuid=pk) + serializer = StudyUploadSerializer(study, many=False,context={'request': request}) + return Response(serializer.data) diff --git a/VRE/apps/virtual_machine/__init__.py b/VRE/apps/virtual_machine/__init__.py new file mode 100644 index 0000000..bc219b3 --- /dev/null +++ b/VRE/apps/virtual_machine/__init__.py @@ -0,0 +1 @@ +default_app_config = 'apps.virtual_machine.apps.VirtualMachineConfig' \ No newline at end of file diff --git a/VRE/apps/virtual_machine/admin.py b/VRE/apps/virtual_machine/admin.py new file mode 100644 index 0000000..521f56c --- /dev/null +++ b/VRE/apps/virtual_machine/admin.py @@ -0,0 +1,63 @@ +from django.contrib import admin +from django.template.defaultfilters import filesizeformat + +from .models import (VirtualMachineOperatingSystem, + VirtualMachineMemory, + VirtualMachineStorage, + VirtualMachineGPU, + VirtualMachineNetwork, + VirtualMachineProfile, + VirtualMachine, + VirtualMachineAccess) + +# Register your models here. 
+@admin.register(VirtualMachine) +class VirtualMachineAdmin(admin.ModelAdmin): + list_display = ('name', 'researcher', 'total_memory', 'total_storage', 'created_at') + search_fields = ('name', 'researcher') + ordering = ('-created_at', 'name', 'researcher', ) + readonly_fields = ('created_at', 'updated_at') + + def total_memory(self, obj): + return filesizeformat(obj.total_memory) + + def total_storage(self, obj): + return filesizeformat(obj.total_storage) + +class VirtualMachinePartAdmin(admin.ModelAdmin): + list_display = ('name', 'is_available', 'created_at') + search_fields = ('name', ) + ordering = ('-created_at', 'name', ) + readonly_fields = ('created_at', 'updated_at') + + +@admin.register(VirtualMachineOperatingSystem) +class VirtualMachineOperatingSystemAdmin(VirtualMachinePartAdmin): + pass + +@admin.register(VirtualMachineMemory) +class VirtualMachineMemoryAdmin(VirtualMachinePartAdmin): + pass + +@admin.register(VirtualMachineNetwork) +class VirtualMachineNetworkAdmin(VirtualMachinePartAdmin): + pass + +@admin.register(VirtualMachineStorage) +class VirtualMachineStorageAdmin(VirtualMachinePartAdmin): + pass + +@admin.register(VirtualMachineGPU) +class VirtualMachineGPUAdmin(VirtualMachinePartAdmin): + pass + +@admin.register(VirtualMachineProfile) +class VirtualMachineProfileAdmin(VirtualMachinePartAdmin): + pass + +@admin.register(VirtualMachineAccess) +class VirtualMachineAccessAdmin(admin.ModelAdmin): + list_display = ('researcher', 'virtual_machine', 'created_at') + search_fields = ('researcher', 'virtual_machine') + ordering = ('-created_at', 'researcher', ) + readonly_fields = ('user','created_at', 'updated_at') \ No newline at end of file diff --git a/VRE/apps/virtual_machine/apps.py b/VRE/apps/virtual_machine/apps.py new file mode 100644 index 0000000..e836f1e --- /dev/null +++ b/VRE/apps/virtual_machine/apps.py @@ -0,0 +1,11 @@ +from django.apps import AppConfig +from django.utils.translation import ugettext_lazy as _ + +class 
VirtualMachineConfig(AppConfig): + name = 'apps.virtual_machine' + label = 'virtual_machine' + verbose_name = _('Virtual Machine') + verbose_name_plural = _('Virtual Machines') + + def ready(self): + import apps.virtual_machine.signals \ No newline at end of file diff --git a/VRE/apps/virtual_machine/fixtures/virtual_machine_initial_data.json b/VRE/apps/virtual_machine/fixtures/virtual_machine_initial_data.json new file mode 100644 index 0000000..a4f396f --- /dev/null +++ b/VRE/apps/virtual_machine/fixtures/virtual_machine_initial_data.json @@ -0,0 +1,156 @@ +[ +{ + "model": "virtual_machine.virtualmachineoperatingsystem", + "pk": 1, + "fields": { + "created_at": "2020-03-31T08:45:41.308Z", + "updated_at": "2020-10-26T09:47:19.425Z", + "name": "Ubuntu 18.04 LTS", + "vm_code": "ea70716d-7a9b-4345-9987-a96acc8e8949", + "is_available": true + } +}, +{ + "model": "virtual_machine.virtualmachineoperatingsystem", + "pk": 2, + "fields": { + "created_at": "2020-03-31T08:45:54.979Z", + "updated_at": "2020-03-31T08:46:14.343Z", + "name": "Windows 10 standaard", + "vm_code": "windows_10", + "is_available": true + } +}, +{ + "model": "virtual_machine.virtualmachinememory", + "pk": 1, + "fields": { + "created_at": "2020-03-31T08:44:54.925Z", + "updated_at": "2020-03-31T08:44:54.925Z", + "name": "1 GB Standaard", + "vm_code": "memory_1gb_standaard", + "is_available": true + } +}, +{ + "model": "virtual_machine.virtualmachinememory", + "pk": 2, + "fields": { + "created_at": "2020-03-31T08:45:15.985Z", + "updated_at": "2020-03-31T08:46:35.124Z", + "name": "1 GB High performance", + "vm_code": "memory_1gb_hp", + "is_available": true + } +}, +{ + "model": "virtual_machine.virtualmachinestorage", + "pk": 1, + "fields": { + "created_at": "2020-03-31T08:47:03.785Z", + "updated_at": "2020-10-26T09:53:18.052Z", + "name": "1GB Standaard", + "vm_code": "storage_1gb_standaard", + "is_available": true + } +}, +{ + "model": "virtual_machine.virtualmachinestorage", + "pk": 2, + "fields": { + 
"created_at": "2020-03-31T08:47:28.233Z", + "updated_at": "2020-10-26T09:53:10.606Z", + "name": "1GB NVMe", + "vm_code": "storage_1gb_nvme", + "is_available": true + } +}, +{ + "model": "virtual_machine.virtualmachinestorage", + "pk": 3, + "fields": { + "created_at": "2020-03-31T08:47:44.444Z", + "updated_at": "2020-10-26T09:53:04.073Z", + "name": "1GB SSD", + "vm_code": "storage_1gb_ssd", + "is_available": true + } +}, +{ + "model": "virtual_machine.virtualmachinenetwork", + "pk": 1, + "fields": { + "created_at": "2020-10-26T09:49:36.730Z", + "updated_at": "2020-10-26T09:50:13.440Z", + "name": "Private network (internal)", + "vm_code": "3918e28f-d1ee-4f8b-aebe-d49b256de249", + "is_available": true, + "network_type": "PRIVATE" + } +}, +{ + "model": "virtual_machine.virtualmachinenetwork", + "pk": 2, + "fields": { + "created_at": "2020-10-26T09:50:08.083Z", + "updated_at": "2020-10-26T09:50:08.083Z", + "name": "Public network (vlan16)", + "vm_code": "136fa66d-4887-4a88-912f-9209dc3c6c45", + "is_available": true, + "network_type": "PUBLIC" + } +}, +{ + "model": "virtual_machine.virtualmachinegpu", + "pk": 1, + "fields": { + "created_at": "2020-03-31T08:44:23.755Z", + "updated_at": "2020-03-31T08:44:23.755Z", + "name": "NVIDIA GTX1080", + "vm_code": "nvidia_gtx1080", + "is_available": true + } +}, +{ + "model": "virtual_machine.virtualmachineprofile", + "pk": 1, + "fields": { + "created_at": "2020-03-31T09:17:11.143Z", + "updated_at": "2020-10-26T09:52:50.679Z", + "vm_code": "c2ad7a1e-937f-445e-b33a-b604711e2468", + "is_available": true, + "name": "Standaard", + "memory_type": 1, + "memory_amount": 4, + "storage_type": 1, + "storage_amount": 20, + "gpu_type": null, + "gpu_amount": 0, + "networks": [ + 1, + 2 + ] + } +}, +{ + "model": "virtual_machine.virtualmachineprofile", + "pk": 2, + "fields": { + "created_at": "2020-03-31T09:17:35.444Z", + "updated_at": "2020-10-26T09:53:40.396Z", + "vm_code": "N/A", + "is_available": true, + "name": "Premium", + "memory_type": 2, 
+ "memory_amount": 12, + "storage_type": 2, + "storage_amount": 100, + "gpu_type": 1, + "gpu_amount": 1, + "networks": [ + 1, + 2 + ] + } +} +] diff --git a/VRE/apps/virtual_machine/locale/en/LC_MESSAGES/django.po b/VRE/apps/virtual_machine/locale/en/LC_MESSAGES/django.po new file mode 100644 index 0000000..599f1b0 --- /dev/null +++ b/VRE/apps/virtual_machine/locale/en/LC_MESSAGES/django.po @@ -0,0 +1,219 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-02-19 11:19+0100\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: apps/virtual_machine/apps.py:7 +msgid "Virtual Machine" +msgstr "" + +#: apps/virtual_machine/apps.py:8 +msgid "Virtual Machines" +msgstr "" + +#: apps/virtual_machine/models.py:19 +msgid "Private" +msgstr "" + +#: apps/virtual_machine/models.py:20 +msgid "Public" +msgstr "" + +#: apps/virtual_machine/models.py:38 +msgid "Easy to remember name for this virtual machine part." +msgstr "" + +#: apps/virtual_machine/models.py:39 +msgid "Technical name for this virtual machine part." +msgstr "" + +#: apps/virtual_machine/models.py:40 +msgid "Only selected virtual machine parts can be chosen from." 
+msgstr "" + +#: apps/virtual_machine/models.py:58 +msgid "virtual machine operating system" +msgstr "" + +#: apps/virtual_machine/models.py:59 +msgid "virtual machine operating systems" +msgstr "" + +#: apps/virtual_machine/models.py:71 apps/virtual_machine/models.py:72 +msgid "virtual machine memory" +msgstr "" + +#: apps/virtual_machine/models.py:91 apps/virtual_machine/models.py:92 +msgid "virtual machine network" +msgstr "" + +#: apps/virtual_machine/models.py:94 +msgid "Network type" +msgstr "" + +#: apps/virtual_machine/models.py:94 +msgid "Network type. Either pirvate or public" +msgstr "" + +#: apps/virtual_machine/models.py:114 apps/virtual_machine/models.py:115 +msgid "virtual machine storage" +msgstr "" + +#: apps/virtual_machine/models.py:135 +msgid "virtual machine GPU" +msgstr "" + +#: apps/virtual_machine/models.py:136 +msgid "virtual machine GPUs" +msgstr "" + +#: apps/virtual_machine/models.py:165 +msgid "virtual machine profile" +msgstr "" + +#: apps/virtual_machine/models.py:166 +msgid "virtual machine profiles" +msgstr "" + +#: apps/virtual_machine/models.py:168 +msgid "Easy to remember name for this virtual machine profile." +msgstr "" + +#: apps/virtual_machine/models.py:170 +msgid "Select the networks that should be connected." +msgstr "" + +#: apps/virtual_machine/models.py:172 apps/virtual_machine/models.py:272 +msgid "Basic memory" +msgstr "" + +#: apps/virtual_machine/models.py:173 apps/virtual_machine/models.py:273 +msgid "Amount of memory. Default is 1" +msgstr "" + +#: apps/virtual_machine/models.py:175 apps/virtual_machine/models.py:275 +msgid "Basic disk size" +msgstr "" + +#: apps/virtual_machine/models.py:176 apps/virtual_machine/models.py:276 +msgid "Amount of disk storage. Default is 1" +msgstr "" + +#: apps/virtual_machine/models.py:178 +msgid "Basic GPU" +msgstr "" + +#: apps/virtual_machine/models.py:179 apps/virtual_machine/models.py:279 +msgid "Amount of GPUs. 
Default is 0" +msgstr "" + +#: apps/virtual_machine/models.py:255 +msgid "virtual machine" +msgstr "" + +#: apps/virtual_machine/models.py:256 +msgid "virtual machines" +msgstr "" + +#: apps/virtual_machine/models.py:258 +msgid "The remote ID of this virtual machine on the cloud platform" +msgstr "" + +#: apps/virtual_machine/models.py:260 +msgid "Easy to remember name for this virtual machine." +msgstr "" + +#: apps/virtual_machine/models.py:262 +msgid "The researcher that own this virtual machine." +msgstr "" + +#: apps/virtual_machine/models.py:264 +msgid "The study for which this virtual machine is used." +msgstr "" + +#: apps/virtual_machine/models.py:266 +msgid "The virtual machine selected profile." +msgstr "" + +#: apps/virtual_machine/models.py:268 +msgid "Networks connected to this virtual machine." +msgstr "" + +#: apps/virtual_machine/models.py:270 +msgid "The operating system for this virtual machine." +msgstr "" + +#: apps/virtual_machine/models.py:278 +msgid "Additional GPU" +msgstr "" + +#: apps/virtual_machine/models.py:281 +msgid "Additional memory" +msgstr "" + +#: apps/virtual_machine/models.py:282 +msgid "Amount of memory. Default is 0" +msgstr "" + +#: apps/virtual_machine/models.py:284 +msgid "Additional storage" +msgstr "" + +#: apps/virtual_machine/models.py:285 +msgid "Amount of storage. Default is 0" +msgstr "" + +#: apps/virtual_machine/models.py:327 +msgid "virtual machine login" +msgstr "" + +#: apps/virtual_machine/models.py:328 +msgid "virtual machine logins" +msgstr "" + +#: apps/virtual_machine/models.py:334 +msgid "The researcher for which this login is valid for." +msgstr "" + +#: apps/virtual_machine/models.py:335 +msgid "The virtual machine to login to." +msgstr "" + +#: apps/virtual_machine/models.py:338 +msgid "Login key" +msgstr "" + +#: apps/virtual_machine/models.py:338 +msgid "The private key to login to the virtual machine." 
+msgstr "" + +#: apps/virtual_machine/models.py:339 +msgid "Password" +msgstr "" + +#: apps/virtual_machine/models.py:339 +msgid "The SSH password to login." +msgstr "" + +#: apps/virtual_machine/models.py:340 +msgid "Login IP" +msgstr "" + +#: apps/virtual_machine/models.py:340 +msgid "The IP address to login to the virtual machine." +msgstr "" diff --git a/VRE/apps/virtual_machine/locale/nl/LC_MESSAGES/django.po b/VRE/apps/virtual_machine/locale/nl/LC_MESSAGES/django.po new file mode 100644 index 0000000..8af5145 --- /dev/null +++ b/VRE/apps/virtual_machine/locale/nl/LC_MESSAGES/django.po @@ -0,0 +1,421 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. +# +msgid "" +msgstr "" +"Project-Id-Version: \n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-02-19 11:19+0100\n" +"PO-Revision-Date: 2020-05-27 16:13+0200\n" +"Last-Translator: Joshua Rubingh \n" +"Language-Team: \n" +"Language: nl\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" +"X-Generator: Poedit 2.0.6\n" + +#: apps/virtual_machine/apps.py:7 +msgid "Virtual Machine" +msgstr "Virtuele machine" + +#: apps/virtual_machine/apps.py:8 +msgid "Virtual Machines" +msgstr "Virtuele machines" + +#: apps/virtual_machine/models.py:19 +msgid "Private" +msgstr "" + +#: apps/virtual_machine/models.py:20 +msgid "Public" +msgstr "" + +#: apps/virtual_machine/models.py:38 +msgid "Easy to remember name for this virtual machine part." +msgstr "Makkelijk te onthouden naam voor dit virtuele machine-onderdeel." + +#: apps/virtual_machine/models.py:39 +msgid "Technical name for this virtual machine part." +msgstr "Technische naam voor dit virtuele machine-onderdeel." + +#: apps/virtual_machine/models.py:40 +msgid "Only selected virtual machine parts can be chosen from." 
+msgstr "" +"Er kan alleen uit geselecteerde onderdelen van virtuele machines worden " +"gekozen." + +#: apps/virtual_machine/models.py:58 +msgid "virtual machine operating system" +msgstr "besturingssysteem van virtuele machine" + +#: apps/virtual_machine/models.py:59 +msgid "virtual machine operating systems" +msgstr "besturingssysteemen van virtuele machine" + +#: apps/virtual_machine/models.py:71 apps/virtual_machine/models.py:72 +msgid "virtual machine memory" +msgstr "geheugen van virtuele machine" + +#: apps/virtual_machine/models.py:91 apps/virtual_machine/models.py:92 +#, fuzzy +#| msgid "virtual machine memory" +msgid "virtual machine network" +msgstr "geheugen van virtuele machine" + +#: apps/virtual_machine/models.py:94 +#, fuzzy +#| msgid "New workspace" +msgid "Network type" +msgstr "Nieuwe werkruimte" + +#: apps/virtual_machine/models.py:94 +msgid "Network type. Either pirvate or public" +msgstr "" + +#: apps/virtual_machine/models.py:114 apps/virtual_machine/models.py:115 +msgid "virtual machine storage" +msgstr "opslag van virtuele machines" + +#: apps/virtual_machine/models.py:135 +msgid "virtual machine GPU" +msgstr "virtuele machine GPU" + +#: apps/virtual_machine/models.py:136 +msgid "virtual machine GPUs" +msgstr "virtuele machine GPU's" + +#: apps/virtual_machine/models.py:165 +msgid "virtual machine profile" +msgstr "profiel van virtuele machine" + +#: apps/virtual_machine/models.py:166 +msgid "virtual machine profiles" +msgstr "profielen van virtuele machine" + +#: apps/virtual_machine/models.py:168 +msgid "Easy to remember name for this virtual machine profile." +msgstr "Gemakkelijk te onthouden naam voor dit virtuele machine profiel." + +#: apps/virtual_machine/models.py:170 +msgid "Select the networks that should be connected." 
+msgstr "" + +#: apps/virtual_machine/models.py:172 apps/virtual_machine/models.py:272 +msgid "Basic memory" +msgstr "Basisgeheugen" + +#: apps/virtual_machine/models.py:173 apps/virtual_machine/models.py:273 +msgid "Amount of memory. Default is 1" +msgstr "Hoeveelheid geheugen. Standaard is 1" + +#: apps/virtual_machine/models.py:175 apps/virtual_machine/models.py:275 +msgid "Basic disk size" +msgstr "Basis schijf grootte" + +#: apps/virtual_machine/models.py:176 apps/virtual_machine/models.py:276 +msgid "Amount of disk storage. Default is 1" +msgstr "Hoeveelheid schijf opslag. Standaard is 1" + +#: apps/virtual_machine/models.py:178 +msgid "Basic GPU" +msgstr "Basis GPU" + +#: apps/virtual_machine/models.py:179 apps/virtual_machine/models.py:279 +msgid "Amount of GPUs. Default is 0" +msgstr "Aantal GPU's. Standaard is 0" + +#: apps/virtual_machine/models.py:255 +msgid "virtual machine" +msgstr "virtuele machine" + +#: apps/virtual_machine/models.py:256 +msgid "virtual machines" +msgstr "virtuele machines" + +#: apps/virtual_machine/models.py:258 +#, fuzzy +#| msgid "Technical name for this virtual machine part." +msgid "The remote ID of this virtual machine on the cloud platform" +msgstr "Technische naam voor dit virtuele machine-onderdeel." + +#: apps/virtual_machine/models.py:260 +msgid "Easy to remember name for this virtual machine." +msgstr "Makkelijk te onthouden naam voor deze virtuele machine." + +#: apps/virtual_machine/models.py:262 +msgid "The researcher that own this virtual machine." +msgstr "De onderzoeker die deze virtuele machine bezit." + +#: apps/virtual_machine/models.py:264 +#, fuzzy +#| msgid "The operating system for this virtual machine." +msgid "The study for which this virtual machine is used." +msgstr "Het besturingssysteem voor deze virtuele machine." + +#: apps/virtual_machine/models.py:266 +msgid "The virtual machine selected profile." +msgstr "Het geselecteerde profiel van de virtuele machine." 
+ +#: apps/virtual_machine/models.py:268 +#, fuzzy +#| msgid "The researcher that own this virtual machine." +msgid "Networks connected to this virtual machine." +msgstr "De onderzoeker die deze virtuele machine bezit." + +#: apps/virtual_machine/models.py:270 +msgid "The operating system for this virtual machine." +msgstr "Het besturingssysteem voor deze virtuele machine." + +#: apps/virtual_machine/models.py:278 +msgid "Additional GPU" +msgstr "Extra GPU" + +#: apps/virtual_machine/models.py:281 +msgid "Additional memory" +msgstr "Extra geheugen" + +#: apps/virtual_machine/models.py:282 +msgid "Amount of memory. Default is 0" +msgstr "Hoeveelheid geheugen. Standaard is 0" + +#: apps/virtual_machine/models.py:284 +msgid "Additional storage" +msgstr "Extra opslag" + +#: apps/virtual_machine/models.py:285 +msgid "Amount of storage. Default is 0" +msgstr "Opslagruimte. Standaard is 0" + +#: apps/virtual_machine/models.py:327 +#, fuzzy +#| msgid "virtual machine" +msgid "virtual machine login" +msgstr "virtuele machine" + +#: apps/virtual_machine/models.py:328 +#, fuzzy +#| msgid "virtual machines" +msgid "virtual machine logins" +msgstr "virtuele machines" + +#: apps/virtual_machine/models.py:334 +#, fuzzy +#| msgid "The researcher that own this virtual machine." +msgid "The researcher for which this login is valid for." +msgstr "De onderzoeker die deze virtuele machine bezit." + +#: apps/virtual_machine/models.py:335 +#, fuzzy +#| msgid "The virtual machine selected profile." +msgid "The virtual machine to login to." +msgstr "Het geselecteerde profiel van de virtuele machine." + +#: apps/virtual_machine/models.py:338 +msgid "Login key" +msgstr "" + +#: apps/virtual_machine/models.py:338 +#, fuzzy +#| msgid "The operating system for this virtual machine." +msgid "The private key to login to the virtual machine." +msgstr "Het besturingssysteem voor deze virtuele machine." 
+ +#: apps/virtual_machine/models.py:339 +msgid "Password" +msgstr "" + +#: apps/virtual_machine/models.py:339 +msgid "The SSH password to login." +msgstr "" + +#: apps/virtual_machine/models.py:340 +msgid "Login IP" +msgstr "" + +#: apps/virtual_machine/models.py:340 +#, fuzzy +#| msgid "The researcher that own this virtual machine." +msgid "The IP address to login to the virtual machine." +msgstr "De onderzoeker die deze virtuele machine bezit." + +#, fuzzy +#~| msgid "virtual machine memory" +#~ msgid "Virtual machine name" +#~ msgstr "geheugen van virtuele machine" + +#, fuzzy +#~| msgid "virtual machine memory" +#~ msgid "Enter the virtual machine name" +#~ msgstr "geheugen van virtuele machine" + +#, fuzzy +#~| msgid "virtual machine memory" +#~ msgid "Study where this virtual machine belongs to" +#~ msgstr "geheugen van virtuele machine" + +#, fuzzy +#~| msgid "virtual machine operating system" +#~ msgid "Virtual machine operating system" +#~ msgstr "besturingssysteem van virtuele machine" + +#, fuzzy +#~| msgid "virtual machine operating system" +#~ msgid "Select the operating system" +#~ msgstr "besturingssysteem van virtuele machine" + +#, fuzzy +#~| msgid "virtual machine profile" +#~ msgid "Virtual machine profile" +#~ msgstr "profiel van virtuele machine" + +#, fuzzy +#~| msgid "virtual machine profile" +#~ msgid "Select a predefined virtual machine profile" +#~ msgstr "profiel van virtuele machine" + +#, fuzzy +#~| msgid "virtual machine memory" +#~ msgid "Virtual machine memory" +#~ msgstr "geheugen van virtuele machine" + +#, fuzzy +#~| msgid "virtual machine storage" +#~ msgid "Virtual machine storage" +#~ msgstr "opslag van virtuele machines" + +#, fuzzy +#~| msgid "virtual machine storage" +#~ msgid "How much storage" +#~ msgstr "opslag van virtuele machines" + +#, fuzzy +#~| msgid "virtual machine GPU" +#~ msgid "Additional virtual machine GPU" +#~ msgstr "virtuele machine GPU" + +#, fuzzy +#~| msgid "virtual machine memory" +#~ msgid "How much 
video memory" +#~ msgstr "geheugen van virtuele machine" + +#, fuzzy +#~| msgid "virtual machine memory" +#~ msgid "Additional virtual machine memory" +#~ msgstr "geheugen van virtuele machine" + +#, fuzzy +#~| msgid "virtual machine storage" +#~ msgid "Additional virtual machine storage" +#~ msgstr "opslag van virtuele machines" + +#, fuzzy +#~| msgid "Virtual machine details" +#~ msgid "Virtual machine detail '%(name)s'" +#~ msgstr "Details van virtuele machines" + +#, fuzzy +#~| msgid "virtual machine" +#~ msgid "Edit virtual machine" +#~ msgstr "virtuele machine" + +#~ msgid "Name" +#~ msgstr "Naam" + +#, fuzzy +#~| msgid "virtual machine operating system" +#~ msgid "Operating system" +#~ msgstr "besturingssysteem van virtuele machine" + +#~ msgid "Memory" +#~ msgstr "Geheugen" + +#, fuzzy +#~| msgid "Additional GPU" +#~ msgid "additional" +#~ msgstr "Extra GPU" + +#~ msgid "Storage" +#~ msgstr "Opslag" + +#~ msgid "GPU" +#~ msgstr "GPU" + +#, fuzzy +#~| msgid "Details" +#~ msgid "Login details" +#~ msgstr "Details" + +#~ msgid "Virtual machine overview" +#~ msgstr "Overzicht van virtuele machines" + +#~ msgid "List of your virtual machines" +#~ msgstr "Lijst met uw virtuele machines" + +#, fuzzy +#~| msgid "virtual machine" +#~ msgid "New virtual machine" +#~ msgstr "virtuele machine" + +#~ msgid "Here you see a list of virtual machines you are using." +#~ msgstr "Hier ziet u een lijst met virtuele machines die u gebruikt." + +#~ msgid "OS" +#~ msgstr "OS" + +#~ msgid "Actions" +#~ msgstr "Acties" + +#, fuzzy +#~| msgid "No virtual machines created." +#~ msgid "No virtual machines available." +#~ msgstr "Geen virtuele machines gemaakt." + +#, fuzzy +#~| msgid "Create a new virtual machine." +#~ msgid "Create a new virtual machine" +#~ msgstr "Maak een nieuwe virtuele machine." + +#, fuzzy +#~| msgid "Here you see a list of virtual machines you are using." 
+#~ msgid "Are you sure to delete the virtual machine:" +#~ msgstr "Hier ziet u een lijst met virtuele machines die u gebruikt." + +#, fuzzy +#~| msgid "Virtual machine details" +#~ msgid "Update virtual machine '%(name)s'" +#~ msgstr "Details van virtuele machines" + +#~ msgid "Make some choices below." +#~ msgstr "Maak hieronder enkele keuzes." + +#, fuzzy +#~| msgid "Virtual machine details" +#~ msgid "Virtual machine ID {virtual_machine_id} is deleted" +#~ msgstr "Details van virtuele machines" + +#, fuzzy +#~| msgid "virtual machine memory" +#~ msgid "Virtual machine %(name)s is %(action)s" +#~ msgstr "geheugen van virtuele machine" + +#~ msgid "Details of your virtual machine" +#~ msgstr "Details van uw virtuele machine" + +#~ msgid "Some info." +#~ msgstr "Wat info." + +#~ msgid "Something went wrong" +#~ msgstr "Er is iets fout gegaan" + +#~ msgid "Type" +#~ msgstr "Type" + +#~ msgid "Loading data" +#~ msgstr "Data laden" + +#~ msgid "Create a new workspace" +#~ msgstr "Maak een nieuwe werkruimte" diff --git a/VRE/apps/virtual_machine/migrations/0001_initial.py b/VRE/apps/virtual_machine/migrations/0001_initial.py new file mode 100644 index 0000000..e66acd7 --- /dev/null +++ b/VRE/apps/virtual_machine/migrations/0001_initial.py @@ -0,0 +1,206 @@ +# Generated by Django 3.1.7 on 2021-02-23 14:37 + +from django.db import migrations, models +import django.db.models.deletion +import django_cryptography.fields + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('study', '0001_initial'), + ('researcher', '0001_initial'), + ] + + operations = [ + migrations.CreateModel( + name='VirtualMachine', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='The date and time this model has been created', verbose_name='Date created')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='The date and time 
this model has been updated', verbose_name='Date updated')), + ('remote_id', models.CharField(blank=True, help_text='The remote ID of this virtual machine on the cloud platform', max_length=50, null=True)), + ('name', models.CharField(help_text='Easy to remember name for this virtual machine.', max_length=50)), + ('base_memory_amount', models.PositiveSmallIntegerField(default=1, help_text='Amount of memory. Default is 1')), + ('base_storage_amount', models.PositiveSmallIntegerField(default=1, help_text='Amount of disk storage. Default is 1')), + ('additional_gpu_amount', models.PositiveSmallIntegerField(default=0, help_text='Amount of GPUs. Default is 0')), + ('additional_memory_amount', models.PositiveSmallIntegerField(default=0, help_text='Amount of memory. Default is 0')), + ('additional_storage_amount', models.PositiveSmallIntegerField(default=0, help_text='Amount of storage. Default is 0')), + ], + options={ + 'verbose_name': 'virtual machine', + 'verbose_name_plural': 'virtual machines', + }, + ), + migrations.CreateModel( + name='VirtualMachineGPU', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='The date and time this model has been created', verbose_name='Date created')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='The date and time this model has been updated', verbose_name='Date updated')), + ('name', models.CharField(help_text='Easy to remember name for this virtual machine part.', max_length=50)), + ('vm_code', models.CharField(help_text='Technical name for this virtual machine part.', max_length=50)), + ('is_available', models.BooleanField(help_text='Only selected virtual machine parts can be chosen from.')), + ], + options={ + 'verbose_name': 'virtual machine GPU', + 'verbose_name_plural': 'virtual machine GPUs', + }, + ), + migrations.CreateModel( + name='VirtualMachineMemory', + fields=[ + ('id', 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='The date and time this model has been created', verbose_name='Date created')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='The date and time this model has been updated', verbose_name='Date updated')), + ('name', models.CharField(help_text='Easy to remember name for this virtual machine part.', max_length=50)), + ('vm_code', models.CharField(help_text='Technical name for this virtual machine part.', max_length=50)), + ('is_available', models.BooleanField(help_text='Only selected virtual machine parts can be chosen from.')), + ], + options={ + 'verbose_name': 'virtual machine memory', + 'verbose_name_plural': 'virtual machine memory', + }, + ), + migrations.CreateModel( + name='VirtualMachineNetwork', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='The date and time this model has been created', verbose_name='Date created')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='The date and time this model has been updated', verbose_name='Date updated')), + ('name', models.CharField(help_text='Easy to remember name for this virtual machine part.', max_length=50)), + ('vm_code', models.CharField(help_text='Technical name for this virtual machine part.', max_length=50)), + ('is_available', models.BooleanField(help_text='Only selected virtual machine parts can be chosen from.')), + ('network_type', models.CharField(choices=[('PRIVATE', 'Private'), ('PUBLIC', 'Public')], help_text='Network type. 
Either pirvate or public', max_length=10, verbose_name='Network type')), + ], + options={ + 'verbose_name': 'virtual machine network', + 'verbose_name_plural': 'virtual machine network', + }, + ), + migrations.CreateModel( + name='VirtualMachineOperatingSystem', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='The date and time this model has been created', verbose_name='Date created')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='The date and time this model has been updated', verbose_name='Date updated')), + ('name', models.CharField(help_text='Easy to remember name for this virtual machine part.', max_length=50)), + ('vm_code', models.CharField(help_text='Technical name for this virtual machine part.', max_length=50)), + ('is_available', models.BooleanField(help_text='Only selected virtual machine parts can be chosen from.')), + ], + options={ + 'verbose_name': 'virtual machine operating system', + 'verbose_name_plural': 'virtual machine operating systems', + }, + ), + migrations.CreateModel( + name='VirtualMachineStorage', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='The date and time this model has been created', verbose_name='Date created')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='The date and time this model has been updated', verbose_name='Date updated')), + ('name', models.CharField(help_text='Easy to remember name for this virtual machine part.', max_length=50)), + ('vm_code', models.CharField(help_text='Technical name for this virtual machine part.', max_length=50)), + ('is_available', models.BooleanField(help_text='Only selected virtual machine parts can be chosen from.')), + ], + options={ + 'verbose_name': 'virtual machine storage', + 
'verbose_name_plural': 'virtual machine storage', + }, + ), + migrations.CreateModel( + name='VirtualMachineProfile', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='The date and time this model has been created', verbose_name='Date created')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='The date and time this model has been updated', verbose_name='Date updated')), + ('vm_code', models.CharField(help_text='Technical name for this virtual machine part.', max_length=50)), + ('is_available', models.BooleanField(help_text='Only selected virtual machine parts can be chosen from.')), + ('name', models.CharField(help_text='Easy to remember name for this virtual machine profile.', max_length=50)), + ('memory_amount', models.PositiveSmallIntegerField(default=1, help_text='Amount of memory. Default is 1')), + ('storage_amount', models.PositiveSmallIntegerField(default=1, help_text='Amount of disk storage. Default is 1')), + ('gpu_amount', models.PositiveSmallIntegerField(default=0, help_text='Amount of GPUs. 
Default is 0')), + ('gpu_type', models.ForeignKey(blank=True, help_text='Basic GPU', limit_choices_to={'is_available': True}, null=True, on_delete=django.db.models.deletion.CASCADE, to='virtual_machine.virtualmachinegpu')), + ('memory_type', models.ForeignKey(help_text='Basic memory', limit_choices_to={'is_available': True}, on_delete=django.db.models.deletion.CASCADE, to='virtual_machine.virtualmachinememory')), + ('networks', models.ManyToManyField(help_text='Select the networks that should be connected.', to='virtual_machine.VirtualMachineNetwork')), + ('storage_type', models.ForeignKey(help_text='Basic disk size', limit_choices_to={'is_available': True}, on_delete=django.db.models.deletion.CASCADE, to='virtual_machine.virtualmachinestorage')), + ], + options={ + 'verbose_name': 'virtual machine profile', + 'verbose_name_plural': 'virtual machine profiles', + }, + ), + migrations.CreateModel( + name='VirtualMachineAccess', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='The date and time this model has been created', verbose_name='Date created')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='The date and time this model has been updated', verbose_name='Date updated')), + ('login_key', models.TextField(help_text='The private key to login to the virtual machine.', max_length=2048, verbose_name='Login key')), + ('password', django_cryptography.fields.encrypt(models.CharField(help_text='The SSH password to login.', max_length=46, verbose_name='Password'))), + ('virtual_machine_ip', models.CharField(help_text='The IP address to login to the virtual machine.', max_length=46, verbose_name='Login IP')), + ('researcher', models.ForeignKey(help_text='The researcher for which this login is valid for.', on_delete=django.db.models.deletion.CASCADE, to='researcher.researcher')), + ('virtual_machine', 
models.ForeignKey(help_text='The virtual machine to login to.', on_delete=django.db.models.deletion.CASCADE, related_name='access', to='virtual_machine.virtualmachine')), + ], + options={ + 'verbose_name': 'virtual machine login', + 'verbose_name_plural': 'virtual machine logins', + }, + ), + migrations.AddField( + model_name='virtualmachine', + name='additional_gpu_type', + field=models.ForeignKey(blank=True, help_text='Additional GPU', limit_choices_to={'is_available': True}, null=True, on_delete=django.db.models.deletion.CASCADE, to='virtual_machine.virtualmachinegpu'), + ), + migrations.AddField( + model_name='virtualmachine', + name='additional_memory_type', + field=models.ForeignKey(blank=True, help_text='Additional memory', limit_choices_to={'is_available': True}, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='additional_memory', to='virtual_machine.virtualmachinememory'), + ), + migrations.AddField( + model_name='virtualmachine', + name='additional_storage_type', + field=models.ForeignKey(blank=True, help_text='Additional storage', limit_choices_to={'is_available': True}, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='additional_storage', to='virtual_machine.virtualmachinestorage'), + ), + migrations.AddField( + model_name='virtualmachine', + name='base_memory_type', + field=models.ForeignKey(help_text='Basic memory', limit_choices_to={'is_available': True}, on_delete=django.db.models.deletion.CASCADE, to='virtual_machine.virtualmachinememory'), + ), + migrations.AddField( + model_name='virtualmachine', + name='base_storage_type', + field=models.ForeignKey(help_text='Basic disk size', limit_choices_to={'is_available': True}, on_delete=django.db.models.deletion.CASCADE, to='virtual_machine.virtualmachinestorage'), + ), + migrations.AddField( + model_name='virtualmachine', + name='networks', + field=models.ManyToManyField(help_text='Networks connected to this virtual machine.', 
to='virtual_machine.VirtualMachineNetwork'), + ), + migrations.AddField( + model_name='virtualmachine', + name='operating_system', + field=models.ForeignKey(help_text='The operating system for this virtual machine.', limit_choices_to={'is_available': True}, on_delete=django.db.models.deletion.CASCADE, to='virtual_machine.virtualmachineoperatingsystem'), + ), + migrations.AddField( + model_name='virtualmachine', + name='profile', + field=models.ForeignKey(help_text='The virtual machine selected profile.', on_delete=django.db.models.deletion.CASCADE, to='virtual_machine.virtualmachineprofile'), + ), + migrations.AddField( + model_name='virtualmachine', + name='researcher', + field=models.ForeignKey(help_text='The researcher that own this virtual machine.', on_delete=django.db.models.deletion.CASCADE, to='researcher.researcher'), + ), + migrations.AddField( + model_name='virtualmachine', + name='study', + field=models.ForeignKey(help_text='The study for which this virtual machine is used.', on_delete=django.db.models.deletion.CASCADE, to='study.study'), + ), + migrations.AddConstraint( + model_name='virtualmachineaccess', + constraint=models.UniqueConstraint(fields=('researcher', 'virtual_machine'), name='researcher_server_login'), + ), + ] diff --git a/VRE/apps/virtual_machine/migrations/__init__.py b/VRE/apps/virtual_machine/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/VRE/apps/virtual_machine/models.py b/VRE/apps/virtual_machine/models.py new file mode 100644 index 0000000..34e6d3b --- /dev/null +++ b/VRE/apps/virtual_machine/models.py @@ -0,0 +1,348 @@ +from django.db import models +from django.utils.translation import gettext_lazy as _ + +from django_cryptography.fields import encrypt + +from lib.models.base import MetaDataModel + +from apps.researcher.models import Researcher +from apps.study.models import Study + +from math import pow + +# Create your models here. 
+ +class VirtualNetworkType(models.TextChoices): + """ + """ + + PRIVATE = ('PRIVATE', _('Private')) + PUBLIC = ('PUBLIC', _('Public')) + +class VirtualMachinePart(models.Model): + """ + This is a base abstract class for multiple virtual machine models. This model provides default fields + + Attributes + ---------- + name : str + The name of the virtual machine part. This is a free field which will be used for showing to the enduser + + is_available : boolean + Is this virtual machine hardware part available for selecting by the enduser. When false, it is not available anymore to the enduser + + vm_code : str + The technical VM code for the virtualization platform. + """ + + name = models.CharField(max_length=50,help_text=_('Easy to remember name for this virtual machine part.')) + vm_code = models.CharField(max_length=50,help_text=_('Technical name for this virtual machine part.')) + is_available = models.BooleanField(help_text=_('Only selected virtual machine parts can be chosen from.')) + + class Meta: + abstract = True + + def __str__(self): + return self.name + +class VirtualMachineOperatingSystem(MetaDataModel,VirtualMachinePart): + """ + The virtual machine operating system model. This will hold the information of available operating systems that can be used for creating virtual machines + + It will inherit the attributes :attr:`~lib.models.base.MetaDataModel.created_at` and :attr:`~lib.models.base.MetaDataModel.updated_at` from the Abstract model :class:`~lib.models.base.MetaDataModel` + + It will inherit the attributes :attr:`~VirtualMachinePart.name`, :attr:`~VirtualMachinePart.is_available` and :attr:`~VirtualMachinePart.vm_code` from the Abstract model :class:`VirtualMachinePart` + """ + + class Meta: + verbose_name = _('virtual machine operating system') + verbose_name_plural = _('virtual machine operating systems') + +class VirtualMachineMemory(MetaDataModel,VirtualMachinePart): + """ + The virtual machine memory model. 
This will hold the information of available memories that can be used for creating virtual machines + + It will inherit the attributes :attr:`~lib.models.base.MetaDataModel.created_at` and :attr:`~lib.models.base.MetaDataModel.updated_at` from the Abstract model :class:`~lib.models.base.MetaDataModel` + + It will inherit the attributes :attr:`~VirtualMachinePart.name`, :attr:`~VirtualMachinePart.is_available` and :attr:`~VirtualMachinePart.vm_code` from the Abstract model :class:`VirtualMachinePart` + """ + + class Meta: + verbose_name = _('virtual machine memory') + verbose_name_plural = _('virtual machine memory') + + @property + def unit_value(self): + """This returns the base memory size which we use for calculations + + Returns: + float -- base memory size in bytes""" + return pow(1024,3) + +class VirtualMachineNetwork(MetaDataModel,VirtualMachinePart): + """ + + It will inherit the attributes :attr:`~lib.models.base.MetaDataModel.created_at` and :attr:`~lib.models.base.MetaDataModel.updated_at` from the Abstract model :class:`~lib.models.base.MetaDataModel` + + It will inherit the attributes :attr:`~VirtualMachinePart.name`, :attr:`~VirtualMachinePart.is_available` and :attr:`~VirtualMachinePart.vm_code` from the Abstract model :class:`VirtualMachinePart` + """ + + class Meta: + verbose_name = _('virtual machine network') + verbose_name_plural = _('virtual machine network') + + network_type = models.CharField(_('Network type'), max_length=10, choices=VirtualNetworkType.choices, help_text=_('Network type. Either pirvate or public')) + + @property + def unit_value(self): + """This returns the base storage size which we use for calculations + + Returns: + float -- Unit value in Gbps""" + return pow(1024,3) + +class VirtualMachineStorage(MetaDataModel,VirtualMachinePart): + """ + The virtual machine storage model. 
This will hold the information of available storages that can be used for creating virtual machines + + It will inherit the attributes :attr:`~lib.models.base.MetaDataModel.created_at` and :attr:`~lib.models.base.MetaDataModel.updated_at` from the Abstract model :class:`~lib.models.base.MetaDataModel` + + It will inherit the attributes :attr:`~VirtualMachinePart.name`, :attr:`~VirtualMachinePart.is_available` and :attr:`~VirtualMachinePart.vm_code` from the Abstract model :class:`VirtualMachinePart` + """ + + class Meta: + verbose_name = _('virtual machine storage') + verbose_name_plural = _('virtual machine storage') + + @property + def unit_value(self): + """This returns the base storage size which we use for calculations + + Returns: + float -- base memory size in bytes""" + return pow(1024,3) + +class VirtualMachineGPU(MetaDataModel,VirtualMachinePart): + """ + The virtual machine operating GPU model. This will hold the information of available GPUs that can be used for creating virtual machines + + It will inherit the attributes :attr:`~lib.models.base.MetaDataModel.created_at` and :attr:`~lib.models.base.MetaDataModel.updated_at` from the Abstract model :class:`~lib.models.base.MetaDataModel` + + It will inherit the attributes :attr:`~VirtualMachinePart.name`, :attr:`~VirtualMachinePart.is_available` and :attr:`~VirtualMachinePart.vm_code` from the Abstract model :class:`VirtualMachinePart` + """ + + class Meta: + verbose_name = _('virtual machine GPU') + verbose_name_plural = _('virtual machine GPUs') + +class VirtualMachineProfile(MetaDataModel,VirtualMachinePart): + """ + The virtual machine profile. This is a predefined setup which can be used for creating new virtual machines. 
+ + It will inherit the attributes :attr:`~lib.models.base.MetaDataModel.created_at` and :attr:`~lib.models.base.MetaDataModel.updated_at` from the Abstract model :class:`~lib.models.base.MetaDataModel` + + It will inherit the attributes :attr:`~VirtualMachinePart.name`, :attr:`~VirtualMachinePart.is_available` and :attr:`~VirtualMachinePart.vm_code` from the Abstract model :class:`VirtualMachinePart` + + Attributes + ---------- + name : str + The name of the virtual machine profile. This name will be used for showing to the end user. + memory_type : VirtualMachineMemory + The memory type that is used for this profile + memory_amount : int + The amount of memory that is available for this profile using the selected :attr:`memory_type` + storage_type : VirtualMachineStorage + The storage type that is used for this profile + storage_amount : int + The amount of storage that is available for this profile using the selected :attr:`storage_type` + gpu_type : VirtualMachineGPU + The GPU type that is used for this profile + gpu_amount : int + The amount of GPUs that is available for this profile using the selected :attr:`gpu_type` + """ + + class Meta: + verbose_name = _('virtual machine profile') + verbose_name_plural = _('virtual machine profiles') + + name = models.CharField(max_length=50,help_text=_('Easy to remember name for this virtual machine profile.')) + + networks = models.ManyToManyField(VirtualMachineNetwork,help_text=_('Select the networks that should be connected.')) + + memory_type = models.ForeignKey(VirtualMachineMemory, on_delete=models.CASCADE, limit_choices_to={'is_available': True}, help_text=_('Basic memory')) + memory_amount = models.PositiveSmallIntegerField(default=1, help_text=_('Amount of memory. 
Default is 1')) + + storage_type = models.ForeignKey(VirtualMachineStorage, on_delete=models.CASCADE, limit_choices_to={'is_available': True}, help_text=_('Basic disk size')) + storage_amount = models.PositiveSmallIntegerField(default=1, help_text=_('Amount of disk storage. Default is 1')) + + gpu_type = models.ForeignKey(VirtualMachineGPU, blank=True, null=True, on_delete=models.CASCADE, limit_choices_to={'is_available': True}, help_text=_('Basic GPU')) + gpu_amount = models.PositiveSmallIntegerField(default=0, help_text=_('Amount of GPUs. Default is 0')) + + def __str__(self): + return self.name + + @property + def description(self): + """ + The description of this profile in the format '[:attr:`memory_amount`] x [:attr:`memory_type`] memory, [:attr:`storage_amount`] x [:attr:`storage_type`] storage'. + + Returns: + str -- small summary of the selected options""" + + return f'{self.memory_amount} x {self.memory_type.name} memory, {self.storage_amount} x {self.storage_type.name} storage' + + @property + def total_memory(self): + """ + The total amount of memory in bytes for this profile + + Returns: + float -- total memory size in bytes + """ + return self.memory_amount * self.memory_type.unit_value + + @property + def total_storage(self): + """ + The total amount of storage in bytes for this profile + + Returns: + float -- total storage size in bytes + """ + return self.storage_amount * self.storage_type.unit_value + +class VirtualMachine(MetaDataModel): + """ + A model which holds a complete virtual machine setup. A virtual machine is linked to a researcher. + + It will inherit the attributes :attr:`~lib.models.base.MetaDataModel.created_at` and :attr:`~lib.models.base.MetaDataModel.updated_at` from the Abstract model :class:`~lib.models.base.MetaDataModel` + + Attributes + ---------- + name : str + The name of the virtual machine. This name will be used for showing to the end user. + researcher : Researcher + The researcher that owns this virtual machine. 
He/she will can login to this virtual machine. + profile : VirtualMachineProfile + The virtual machine profile that is selected when created. + operating_system : VirtualMachineOperatingSystem + The operating machine that is being used for this virtual machine + + base_memory_type : VirtualMachineMemory + The memory type that is used for this virtual machine + base_memory_amount : int + The amount of memory that is available for this virtual machine using the selected :attr:`base_memory_type` + base_storage_type : VirtualMachineStorage + The storage type that is used for this virtual machine + base_storage_amount : int + The amount of storage that is available for this virtual machine using the selected :attr:`storage_type` + + additional_gpu_type : VirtualMachineGPU, optional + The GPU type that is used for this virtual machine + additional_gpu_amount : int, optional + The amount of GPUs that is available for this virtual machine using the selected :attr:`additional_gpu_type` + additional_memory_type : VirtualMachineMemory, optional + The additional memory type that is used for this virtual machine + additional_memory_amount : int, optional + The amount of additional memory that is available for this virtual machine using the selected :attr:`additional_memory_type` + additional_storage_type : VirtualMachineStorage, optional + The additional storage type that is used for this virtual machine + additional_storage_amount : int, optional + The amount of additional storage that is available for this virtual machine using the selected :attr:`additional_storage_type` + """ + + class Meta: + verbose_name = _('virtual machine') + verbose_name_plural = _('virtual machines') + + remote_id = models.CharField(max_length=50, blank=True, null=True, help_text=_('The remote ID of this virtual machine on the cloud platform')) + + name = models.CharField(max_length=50,help_text=_('Easy to remember name for this virtual machine.')) + + researcher = models.ForeignKey(Researcher, 
on_delete=models.CASCADE, help_text=_('The researcher that own this virtual machine.')) + + study = models.ForeignKey(Study, on_delete=models.CASCADE, help_text=_('The study for which this virtual machine is used.')) + + profile = models.ForeignKey(VirtualMachineProfile, on_delete=models.CASCADE, help_text=_('The virtual machine selected profile.')) + + networks = models.ManyToManyField(VirtualMachineNetwork,help_text=_('Networks connected to this virtual machine.')) + + operating_system = models.ForeignKey(VirtualMachineOperatingSystem, on_delete=models.CASCADE, limit_choices_to={'is_available': True}, help_text=_('The operating system for this virtual machine.')) + + base_memory_type = models.ForeignKey(VirtualMachineMemory, on_delete=models.CASCADE, limit_choices_to={'is_available': True}, help_text=_('Basic memory')) + base_memory_amount = models.PositiveSmallIntegerField(default=1, help_text=_('Amount of memory. Default is 1')) + + base_storage_type = models.ForeignKey(VirtualMachineStorage, on_delete=models.CASCADE, limit_choices_to={'is_available': True}, help_text=_('Basic disk size')) + base_storage_amount = models.PositiveSmallIntegerField(default=1, help_text=_('Amount of disk storage. Default is 1')) + + additional_gpu_type = models.ForeignKey(VirtualMachineGPU, blank=True, null=True, on_delete=models.CASCADE, limit_choices_to={'is_available': True}, help_text=_('Additional GPU')) + additional_gpu_amount = models.PositiveSmallIntegerField(default=0, help_text=_('Amount of GPUs. Default is 0')) + + additional_memory_type = models.ForeignKey(VirtualMachineMemory, blank=True, null=True, related_name='additional_memory', on_delete=models.CASCADE, limit_choices_to={'is_available': True}, help_text=_('Additional memory')) + additional_memory_amount = models.PositiveSmallIntegerField(default=0, help_text=_('Amount of memory. 
Default is 0')) + + additional_storage_type = models.ForeignKey(VirtualMachineStorage, blank=True, null=True, related_name='additional_storage', on_delete=models.CASCADE, limit_choices_to={'is_available': True}, help_text=_('Additional storage')) + additional_storage_amount = models.PositiveSmallIntegerField(default=0, help_text=_('Amount of storage. Default is 0')) + + def __str__(self): + return self.name + + @property + def total_memory(self): + """ + The total amount of memory in bytes for this virtual machine. This is the total memory of base memory + additional memory + + Returns: + float -- total memory size in bytes + """ + memory = 0 + if self.base_memory_type is not None: + memory = self.base_memory_amount * self.base_memory_type.unit_value + + if self.additional_memory_type is not None: + memory += self.additional_memory_amount * self.additional_memory_type.unit_value + + return memory + + @property + def total_storage(self): + """ + The total amount of storage in bytes for this virtual machine. 
This is the total amount of base storage + additional storage + + Returns: + float -- total storage size in bytes + """ + storage = 0 + if self.base_storage_type is not None: + storage = self.base_storage_amount * self.base_storage_type.unit_value + + if self.additional_storage_type is not None: + storage += self.additional_storage_amount * self.additional_storage_type.unit_value + + return storage + +class VirtualMachineAccess(MetaDataModel): + + class Meta: + verbose_name = _('virtual machine login') + verbose_name_plural = _('virtual machine logins') + + constraints = [ + models.UniqueConstraint(fields=['researcher', 'virtual_machine'], name='researcher_server_login') + ] + + researcher = models.ForeignKey(Researcher, on_delete=models.CASCADE, help_text=_('The researcher for which this login is valid for.')) + virtual_machine = models.ForeignKey(VirtualMachine, related_name='access', on_delete=models.CASCADE, help_text=_('The virtual machine to login to.')) + + # For 2Factor we use both password and ssh key + login_key = models.TextField(_('Login key'), max_length=2048, help_text=_('The private key to login to the virtual machine.')) + password = encrypt(models.CharField(_('Password'), max_length=46, help_text=_('The SSH password to login.'))) + virtual_machine_ip = models.CharField(_('Login IP'), max_length=46, help_text=_('The IP address to login to the virtual machine.')) + + @property + def user(self): + return self.researcher.user.username + + def __str__(self): + return f'Access login for {self.researcher} to machine {self.virtual_machine}' + \ No newline at end of file diff --git a/VRE/apps/virtual_machine/serializers.py b/VRE/apps/virtual_machine/serializers.py new file mode 100644 index 0000000..d1853d6 --- /dev/null +++ b/VRE/apps/virtual_machine/serializers.py @@ -0,0 +1,129 @@ +from rest_framework import serializers + +from apps.virtual_machine.models import (VirtualMachine, + VirtualMachineOperatingSystem, + VirtualMachineProfile, + 
VirtualMachineMemory, + VirtualMachineNetwork, + VirtualMachineStorage, + VirtualMachineGPU, + VirtualMachineAccess) + +from lib.api.base import BaseHyperlinkedModelSerializer + +class VirtualMachineOperatingSystemSerializer(BaseHyperlinkedModelSerializer): + class Meta: + model = VirtualMachineOperatingSystem + exclude = ['is_available','vm_code'] + extra_kwargs = { + 'url': {'view_name': 'api:v1:virtualmachineoperatingsystem-detail'} + } + +class VirtualMachineMemorySerializer(BaseHyperlinkedModelSerializer): + unit_value = serializers.ReadOnlyField() + + class Meta: + model = VirtualMachineMemory + exclude = ['is_available','vm_code'] + extra_kwargs = { + 'url': {'view_name': 'api:v1:virtualmachinememory-detail'} + } + +class VirtualMachineNetworkSerializer(BaseHyperlinkedModelSerializer): + unit_value = serializers.ReadOnlyField() + + class Meta: + model = VirtualMachineNetwork + exclude = ['is_available','vm_code'] + extra_kwargs = { + 'url': {'view_name': 'api:v1:virtualmachinenetwork-detail'} + } + +class VirtualMachineStorageSerializer(BaseHyperlinkedModelSerializer): + unit_value = serializers.ReadOnlyField() + + class Meta: + model = VirtualMachineStorage + exclude = ['is_available','vm_code'] + extra_kwargs = { + 'url': {'view_name': 'api:v1:virtualmachinestorage-detail'} + } + +class VirtualMachineGPUSerializer(BaseHyperlinkedModelSerializer): + class Meta: + model = VirtualMachineGPU + exclude = ['is_available','vm_code'] + extra_kwargs = { + 'url': {'view_name': 'api:v1:virtualmachinegpu-detail'} + } + +class VirtualMachineProfileSerializer(BaseHyperlinkedModelSerializer): + description = serializers.ReadOnlyField() + + memory_type = VirtualMachineMemorySerializer() + networks = VirtualMachineNetworkSerializer(many=True) + storage_type = VirtualMachineStorageSerializer() + gpu_type = VirtualMachineGPUSerializer() + + class Meta: + model = VirtualMachineProfile + exclude = ['is_available','vm_code'] + extra_kwargs = { + 'url': {'view_name': 
'api:v1:virtualmachineprofile-detail'}, + } + +class VirtualMachineAccessSerializer(BaseHyperlinkedModelSerializer): + user = serializers.ReadOnlyField() + + class Meta: + model = VirtualMachineAccess + fields = '__all__' + extra_kwargs = { + 'url': {'view_name': 'api:v1:virtualmachineaccess-detail'}, + 'virtual_machine' : {'view_name': 'api:v1:virtualmachine-detail'}, + } + +class VirtualMachineSerializer(BaseHyperlinkedModelSerializer): + remote_id = serializers.ReadOnlyField() + profile_name = serializers.ReadOnlyField(source='profile.name') + operating_system_name = serializers.ReadOnlyField(source='operating_system.name') + + total_memory = serializers.ReadOnlyField() + total_storage = serializers.ReadOnlyField() + + # TODO: For every virtual machine, we get all the access accounts. So maybe we need to filter here for only the active logged in user. + access = VirtualMachineAccessSerializer( + many=True, + read_only=True, + ) + + class Meta: + model = VirtualMachine + fields = '__all__' + # Extra kwargs are needed here due to namespace usage. 
+ extra_kwargs = { + 'url' : {'view_name': 'api:v1:virtualmachine-detail'}, + + 'profile' : {'view_name': 'api:v1:virtualmachineprofile-detail'}, + + 'study' : {'view_name': 'api:v1:study-detail'}, + + 'networks' : {'view_name': 'api:v1:virtualmachinenetwork-detail'}, + + 'operating_system' : {'view_name': 'api:v1:virtualmachineoperatingsystem-detail'}, + 'base_memory_type' : {'view_name': 'api:v1:virtualmachinememory-detail'}, + 'base_storage_type' : {'view_name': 'api:v1:virtualmachinestorage-detail'}, + + 'additional_memory_type' : {'view_name': 'api:v1:virtualmachinememory-detail'}, + 'additional_storage_type' : {'view_name': 'api:v1:virtualmachinestorage-detail'}, + 'additional_gpu_type' : {'view_name': 'api:v1:virtualmachinegpu-detail'}, + } + + + def create(self, validated_data): + networks = validated_data.pop('networks') + virtual_machine = VirtualMachine.objects.create(**validated_data) + for network in networks: + virtual_machine.networks.add(network) + + return virtual_machine diff --git a/VRE/apps/virtual_machine/signals.py b/VRE/apps/virtual_machine/signals.py new file mode 100644 index 0000000..ed6b792 --- /dev/null +++ b/VRE/apps/virtual_machine/signals.py @@ -0,0 +1,36 @@ +from django.db.models.signals import post_save, post_delete +from django.dispatch import receiver + +from .models import VirtualMachine, VirtualNetworkType, VirtualMachineAccess + +from .tasks import create_virtual_machine_task, delete_virtual_machine_task + +@receiver(post_save, sender=VirtualMachine) +def create_virtual_machine(sender, instance, created, **kwargs): + """ + | A signal that is fired when a new virtual machine is created in the DB. This will trigger the action to create a VPS on a cloud platform + + Arguments + ---------- + sender : sender + The model that has triggered the signal + + instance: :attr:`VirtualMachine` + The newly created virtual machine object + + created : boolean + Whether the object was created or updated. 
When true it is newly created + """ + + if instance.remote_id: + # We have already a connection with the cloud instance... For now we do nothing. + return + + create_virtual_machine_task(instance.pk) + +@receiver(post_delete, sender=VirtualMachine) +def terminate_virtual_machine(sender, instance, **kwargs): + if instance.remote_id: + # If we have a remote_id in the instance, then we need to delete the VPS from the cloud provider + delete_virtual_machine_task(instance.remote_id) + \ No newline at end of file diff --git a/VRE/apps/virtual_machine/tasks.py b/VRE/apps/virtual_machine/tasks.py new file mode 100644 index 0000000..73e3f51 --- /dev/null +++ b/VRE/apps/virtual_machine/tasks.py @@ -0,0 +1,107 @@ +from huey.contrib.djhuey import periodic_task, task + +from lib.cloud.openstack_client import VRE_OpenStackClient + +from Crypto.PublicKey import RSA +import crypt +import string +import random + +from .models import VirtualMachine, VirtualNetworkType, VirtualMachineAccess + +# Create the virtual machine in the background with a 30 sec delay, so that the model is fully saved (M2M) +@task(delay=30) +def create_virtual_machine_task(virtual_machine_id): + virtual_machine = VirtualMachine.objects.get(id=virtual_machine_id) + + print(f'Create VPS for: {virtual_machine}') + + # Create a SHA512 password for the password file. 
+ ssh_password = "".join(random.sample(string.ascii_lowercase+string.ascii_uppercase+string.digits+string.punctuation,16)) + hashed_password = crypt.crypt(ssh_password, crypt.mksalt(crypt.METHOD_SHA512)) + + key = RSA.generate(4096) + private_key = key.export_key().decode('utf8') + public_key = key.publickey().export_key(format='OpenSSH').decode('utf8') + + user_data = f'''#cloud-config +users: + - default + - name: {virtual_machine.researcher.user.username} + gecos: {virtual_machine.researcher.user.get_full_name()} + groups: davfs2 + shell: /bin/bash + lock_passwd: false + passwd: {hashed_password} + ssh_authorized_keys: + - {public_key} + +ssh_pwauth: True + +package_update: true +#package_upgrade: true +packages: + - davfs2 + +groups: + - davfs2: [root,ubuntu] + +# We add 2FA to the SSH by using a private key AND password +runcmd: + - echo 'AuthenticationMethods publickey,password' >> /etc/ssh/sshd_config + - service ssh restart + - echo "{virtual_machine.study.storagelocation.storageengine.location}{virtual_machine.study.storagelocation.path} {virtual_machine.study.storagelocation.storageengine.username} {virtual_machine.study.storagelocation.storageengine.password}" >> /etc/davfs2/secrets + - echo "{virtual_machine.study.storagelocation.storageengine.location}{virtual_machine.study.storagelocation.path} /opt/research_data davfs user,rw,auto,uid={virtual_machine.researcher.user.username},gid={virtual_machine.researcher.user.username} 0 0" >> /etc/fstab + - [mkdir, /opt/research_data] + - [mount, /opt/research_data]''' + + # Openstack values: + name = f'VRE_{virtual_machine.study.name}_({virtual_machine.researcher.user.get_full_name()})' + flavour = virtual_machine.profile.vm_code + image = virtual_machine.operating_system.vm_code + networks = { + 'private' : [], + 'float' : [] + } + # Only add private networks now, the public network should be added as a floating IP + + for network in virtual_machine.networks.all(): + if network.network_type == 
VirtualNetworkType.PRIVATE: + networks['private'].append(network.vm_code) + elif network.network_type == VirtualNetworkType.PUBLIC: + networks['float'].append(network.vm_code) + + + opts = { + 'memory' : virtual_machine.total_memory, + 'storage' : virtual_machine.total_storage, + 'user_data' : user_data + } + + print('Connecting to Openstack') + client = VRE_OpenStackClient('hpc') + print(f'Createing a new VPS... please wait...') + result = client.create_vps(name, image, flavour, networks, opts) + print(result) + + virtual_machine.remote_id = result['id'] + virtual_machine.save() + + # TODO: figure this out.... why update_or_create does not work... For now, we work around it. + access, created = VirtualMachineAccess.objects.get_or_create( + researcher = virtual_machine.researcher, + virtual_machine = virtual_machine + ) + + access.virtual_machine_ip = result['ipv4'] + access.login_key = private_key + access.password = ssh_password + access.save() + + print(f'Created VPS {virtual_machine} -> {virtual_machine.remote_id}') + +@task(delay=30) +def delete_virtual_machine_task(virtual_machine_id): + client = VRE_OpenStackClient('hpc') + client.remove_vps(virtual_machine_id) + print(f'Deleted VPS with ID {virtual_machine_id}') \ No newline at end of file diff --git a/VRE/apps/virtual_machine/tests.py b/VRE/apps/virtual_machine/tests.py new file mode 100644 index 0000000..7ce503c --- /dev/null +++ b/VRE/apps/virtual_machine/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. 
diff --git a/VRE/apps/virtual_machine/views.py b/VRE/apps/virtual_machine/views.py new file mode 100644 index 0000000..b77f850 --- /dev/null +++ b/VRE/apps/virtual_machine/views.py @@ -0,0 +1,115 @@ +from rest_framework import viewsets +from rest_framework import permissions + +from .serializers import (VirtualMachineSerializer, + VirtualMachineOperatingSystemSerializer, + VirtualMachineProfileSerializer, + VirtualMachineMemorySerializer, + VirtualMachineNetworkSerializer, + VirtualMachineStorageSerializer, + VirtualMachineGPUSerializer, + VirtualMachineAccessSerializer) + +from apps.virtual_machine.models import (VirtualMachine, + VirtualMachineOperatingSystem, + VirtualMachineProfile, + VirtualMachineMemory, + VirtualMachineNetwork, + VirtualMachineStorage, + VirtualMachineGPU, + VirtualMachineAccess) + +from lib.api.base import BaseViewSet + +class VirtualMachineViewSet(BaseViewSet): + """ + API endpoint for creating/reading/updating/deleting virtual machines. + """ + queryset = VirtualMachine.objects.all().order_by('name') + serializer_class = VirtualMachineSerializer + + def perform_create(self, serializer): + serializer.save(researcher = self.request.user.researcher) + + def get_queryset(self): + """ + This view should return a list of all the virtual machines for the currently authenticated user ordered by name. + """ + if getattr(self, 'swagger_fake_view', False): + return self.queryset + + return self.queryset.filter(researcher=self.request.user.researcher).select_related('researcher') \ + .select_related('profile') \ + .select_related('operating_system') \ + .select_related('base_memory_type') \ + .select_related('base_storage_type') \ + .select_related('additional_gpu_type') \ + .select_related('additional_memory_type') \ + .select_related('additional_storage_type') + +class VirtualMachineProfileViewSet(viewsets.ReadOnlyModelViewSet): + """ + API endpoint for listing virtual machines available profiles. This is a readonly endpoint. 
+ """ + queryset = VirtualMachineProfile.objects.filter(is_available=True).order_by('name') + serializer_class = VirtualMachineProfileSerializer + permission_classes = [permissions.IsAuthenticated] + + def get_queryset(self): + """ + Overrule default queryset by selecting related tables at once ordered by name. + """ + return self.queryset.select_related('memory_type').select_related('storage_type').select_related('gpu_type') + +class VirtualMachineOperatingSystemViewSet(viewsets.ReadOnlyModelViewSet): + """ + API endpoint for listing virtual machines available operating systems. This is a readonly endpoint. + """ + queryset = VirtualMachineOperatingSystem.objects.filter(is_available=True).order_by('name') + serializer_class = VirtualMachineOperatingSystemSerializer + permission_classes = [permissions.IsAuthenticated] + +class VirtualMachineMemoryViewSet(viewsets.ReadOnlyModelViewSet): + """ + API endpoint for listing virtual machines available memory types. This is a readonly endpoint. + """ + queryset = VirtualMachineMemory.objects.filter(is_available=True).order_by('name') + serializer_class = VirtualMachineMemorySerializer + permission_classes = [permissions.IsAuthenticated] + +class VirtualMachineNetworkViewSet(viewsets.ReadOnlyModelViewSet): + """ + API endpoint for listing virtual machines available network types. This is a readonly endpoint. + """ + queryset = VirtualMachineNetwork.objects.filter(is_available=True).order_by('name') + serializer_class = VirtualMachineNetworkSerializer + permission_classes = [permissions.IsAuthenticated] + +class VirtualMachineStorageViewSet(viewsets.ReadOnlyModelViewSet): + """ + API endpoint for listing virtual machines available storage types. This is a readonly endpoint. 
+ """ + queryset = VirtualMachineStorage.objects.filter(is_available=True).order_by('name') + serializer_class = VirtualMachineStorageSerializer + permission_classes = [permissions.IsAuthenticated] + +class VirtualMachineGPUViewSet(viewsets.ReadOnlyModelViewSet): + """ + API endpoint for listing virtual machines available GPU types. This is a readonly endpoint. + """ + queryset = VirtualMachineGPU.objects.filter(is_available=True).order_by('name') + serializer_class = VirtualMachineGPUSerializer + permission_classes = [permissions.IsAuthenticated] + +class VirtualMachineAccessViewSet(viewsets.ReadOnlyModelViewSet): + """ + API endpoint for listing virtual machines logins. This is a readonly endpoint. + """ + queryset = VirtualMachineAccess.objects + serializer_class = VirtualMachineAccessSerializer + permission_classes = [permissions.IsAuthenticated] + + def get_queryset(self): + if getattr(self, 'swagger_fake_view', False): + return self.queryset + return self.queryset.filter(researcher=self.request.user.researcher) \ No newline at end of file diff --git a/VRE/locale/en/LC_MESSAGES/django.po b/VRE/locale/en/LC_MESSAGES/django.po new file mode 100644 index 0000000..f435645 --- /dev/null +++ b/VRE/locale/en/LC_MESSAGES/django.po @@ -0,0 +1,35 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. 
+# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-02-19 11:19+0100\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: lib/models/base.py:16 +msgid "Date created" +msgstr "" + +#: lib/models/base.py:16 +msgid "The date and time this model has been created" +msgstr "" + +#: lib/models/base.py:17 +msgid "Date updated" +msgstr "" + +#: lib/models/base.py:17 +msgid "The date and time this model has been updated" +msgstr "" diff --git a/VRE/locale/nl/LC_MESSAGES/django.po b/VRE/locale/nl/LC_MESSAGES/django.po new file mode 100644 index 0000000..8a92e9e --- /dev/null +++ b/VRE/locale/nl/LC_MESSAGES/django.po @@ -0,0 +1,124 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. 
+# +msgid "" +msgstr "" +"Project-Id-Version: \n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2021-02-19 11:19+0100\n" +"PO-Revision-Date: 2020-05-27 16:05+0200\n" +"Last-Translator: Joshua Rubingh \n" +"Language-Team: \n" +"Language: nl\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" +"X-Generator: Poedit 2.0.6\n" + +#: lib/models/base.py:16 +msgid "Date created" +msgstr "Datum gecreeërd" + +#: lib/models/base.py:16 +msgid "The date and time this model has been created" +msgstr "De datum en tijd waarop dit model is gemaakt" + +#: lib/models/base.py:17 +msgid "Date updated" +msgstr "Datum bijgewerkt" + +#: lib/models/base.py:17 +msgid "The date and time this model has been updated" +msgstr "De datum en tijd waarop dit model is bijgewerkt" + +#, fuzzy +#~| msgid "Date created" +#~ msgid "Date mailed" +#~ msgstr "Datum gecreeërd" + +#, fuzzy +#~| msgid "Storages" +#~ msgid "Storage" +#~ msgstr "Opslagplekken" + +#~ msgid "Storages" +#~ msgstr "Opslagplekken" + +#, fuzzy +#~| msgid "Storages" +#~ msgid "storage engines" +#~ msgstr "Opslagplekken" + +#, fuzzy +#~| msgid "Studies" +#~ msgid "Study" +#~ msgstr "Onderzoeken" + +#~ msgid "Studies" +#~ msgstr "Onderzoeken" + +#, fuzzy +#~| msgid "Studies" +#~ msgid "studie" +#~ msgstr "Onderzoeken" + +#, fuzzy +#~| msgid "Studies" +#~ msgid "studies" +#~ msgstr "Onderzoeken" + +#~ msgid "Dutch" +#~ msgstr "Nederlands" + +#~ msgid "English" +#~ msgstr "Engels" + +#~ msgid "Virtual Research Environment" +#~ msgstr "Virtuele onderzoeksomgeving" + +#~ msgid "Secure data drops to RUG Phd students" +#~ msgstr "Veilige data delingen voor RUG-promovendi" + +#~ msgid "Here you can securely upload files for Phd students and researchers." +#~ msgstr "" +#~ "Hier kunt u veilig bestanden uploaden voor promovendi en onderzoekers." 
+ +#~ msgid "The following actions can be done:" +#~ msgstr "De volgende acties zijn mogelijk:" + +#~ msgid "Study overview for grouping uploads for a single research study" +#~ msgstr "" +#~ "Onderzoeksoverzicht voor het groeperen van uploads voor één onderzoek" + +#~ msgid "Create a new study" +#~ msgstr "Maak een onderzoek" + +#~ msgid "Get a list of all the uploaded files for your studies" +#~ msgstr "Krijg een lijst met alle geüploade bestanden voor je onderzoeken" + +#~ msgid "Logout" +#~ msgstr "Uitloggen" + +#~ msgid "VRE" +#~ msgstr "VRE" + +#~ msgid "Login" +#~ msgstr "Log in" + +#~ msgid "New" +#~ msgstr "Nieuw" + +#~ msgid "Virtual machines" +#~ msgstr "Virtuele machines" + +#~ msgid "Activity" +#~ msgstr "Activiteit" + +#~ msgid "Signup" +#~ msgstr "Aanmelden" + +#~ msgid "Please contact x@y.z" +#~ msgstr "Neem contact op met x@y.z" diff --git a/VRE/log b/VRE/log new file mode 120000 index 0000000..3355f40 --- /dev/null +++ b/VRE/log @@ -0,0 +1 @@ +../log/ \ No newline at end of file diff --git a/VRE/logging.ini.disabled b/VRE/logging.ini.disabled new file mode 100644 index 0000000..d7b0e2c --- /dev/null +++ b/VRE/logging.ini.disabled @@ -0,0 +1,46 @@ +# In order to change the logging configuration, make a copy of this file and save it as 'logging.custom.ini' +# Then the logger will update the default logging values with your custom logging settings + +[loggers] +keys=root + +[handlers] +keys=consoleHandler,fileHandler,fileHandlerDebug,syslogHandler + +[formatters] +keys=simpleFormatter + +[logger_root] +level=DEBUG +# Add fileHandlerDebug to handlers to enable debug logging +# Add syslogHandler to handlers to enable syslog logging. 
+handlers=consoleHandler,fileHandler,fileHandlerDebug + +[handler_consoleHandler] +class=StreamHandler +level=WARNING +formatter=simpleFormatter +args=(sys.stdout,) + +[handler_fileHandler] +class=logging.handlers.TimedRotatingFileHandler +level=INFO +formatter=simpleFormatter +args=('log/django.log','midnight',1,30) + +[handler_fileHandlerDebug] +class=logging.handlers.TimedRotatingFileHandler +level=DEBUG +formatter=simpleFormatter +args=('log/django.debug.log','midnight',1,30) + +[handler_syslogHandler] +class=logging.handlers.SysLogHandler +level=INFO +formatter=simpleFormatter +# Use '/dev/log' for local syslog. For remote use ('hostip',portnr) +args=('/dev/log','local6') + +[formatter_simpleFormatter] +format=%(asctime)s - %(levelname)-7s - %(module)-12s - %(message)s +datefmt= diff --git a/VRE/manage.py b/VRE/manage.py new file mode 100755 index 0000000..0018756 --- /dev/null +++ b/VRE/manage.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +"""Django's command-line utility for administrative tasks.""" +import os +import sys + + +def main(): + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'VRE.settings') + try: + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" 
+ ) from exc + execute_from_command_line(sys.argv) + + +if __name__ == '__main__': + main() diff --git a/VRE/requirements.txt b/VRE/requirements.txt new file mode 100644 index 0000000..543e590 --- /dev/null +++ b/VRE/requirements.txt @@ -0,0 +1,24 @@ +wheel==0.36.2 +Django==3.1.7 +dj-database-url==0.5.0 +django-cryptography==1.0 +djangorestframework==3.12.2 +drf_yasg==1.20.0 +python-decouple==3.4 +hawkrest==1.0.1 +requests-hawk==1.1.0 +requests==2.25.1 +openstacksdk==0.53.0 +pycryptodome==3.10.1 +huey==2.3.0 +redis==3.5.3 +djoser==2.1.0 +diskcache==5.2.1 +psycopg2-binary==2.8.6 +gunicorn==20.0.4 +webdavclient3==3.14.5 +giteapy==1.0.8 +requests==2.25.1 +requests-hawk==1.1.0 +PyGithub==1.54.1 +python-irodsclient==0.8.6 \ No newline at end of file diff --git a/VRE/static/images/.gitignore b/VRE/static/images/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/VRE/static/images/RUG_Logo.jpg b/VRE/static/images/RUG_Logo.jpg new file mode 100644 index 0000000..da3037b Binary files /dev/null and b/VRE/static/images/RUG_Logo.jpg differ diff --git a/VRE/static/javascript/javascript.js b/VRE/static/javascript/javascript.js new file mode 100644 index 0000000..c3aadf9 --- /dev/null +++ b/VRE/static/javascript/javascript.js @@ -0,0 +1 @@ +/* Dummy javascript file */ \ No newline at end of file diff --git a/VRE/static/style/style.css b/VRE/static/style/style.css new file mode 100644 index 0000000..236c4b8 --- /dev/null +++ b/VRE/static/style/style.css @@ -0,0 +1 @@ +/* Dummy style sheet */ \ No newline at end of file diff --git a/VRE/storage/__init__.py b/VRE/storage/__init__.py new file mode 100644 index 0000000..77535cd --- /dev/null +++ b/VRE/storage/__init__.py @@ -0,0 +1,8 @@ +import os +import logging +import logging.config + +if os.path.isfile('logging.custom.ini'): + logging.config.fileConfig('logging.custom.ini') +elif os.path.isfile('logging.ini'): + logging.config.fileConfig('logging.ini') \ No newline at end of file diff --git 
a/VRE/storage/engines/fs.py b/VRE/storage/engines/fs.py new file mode 100644 index 0000000..0dbb709 --- /dev/null +++ b/VRE/storage/engines/fs.py @@ -0,0 +1,29 @@ +import logging +logger = logging.getLogger(__name__) + +from storage.storage import BaseStorage + +import os +import shutil + +class LocalStorage(BaseStorage): + + TYPE = 'fs' + + def file_exists(self, filepath): + return os.path.exists(filepath) and os.path.isfile(filepath) + + def directory_exists(self, filepath): + return os.path.exists(filepath) and os.path.isdir(filepath) + + def _make_folder_action(self, path): + os.makedirs(path) + return True + + def _upload_file_action(self, source, destination): + shutil.copy(source, destination) + return True + + def _download_file_action(self, source, destination): + shutil.copy(source, destination) + return True diff --git a/VRE/storage/engines/gitea.py b/VRE/storage/engines/gitea.py new file mode 100644 index 0000000..631268e --- /dev/null +++ b/VRE/storage/engines/gitea.py @@ -0,0 +1,88 @@ +import logging +logger = logging.getLogger(__name__) + +from storage.storage import BaseStorage + +# Gitea Support - https://pypi.org/project/giteapy/ +import base64 +import giteapy +from giteapy.rest import ApiException + +class GiteaStorage(BaseStorage): + + TYPE = 'gitea' + + def __init__(self, url = None, username = None, password = None, source = None, destination = None, encryption_key = None, sender_name = None, sender_email = None): + # The repository is added to the url parameter. Use a '#' as seperator. The repository needs to be created first. + # Ex: https://git.web.rug.nl/api/v1#RepositoryName + (url, self.repository) = url.split('#') + destination = destination.strip('/') + + super().__init__(url, username, password, source, destination, encryption_key, sender_name, sender_email) + + # Create a commiter object when the data is uploaded through one of the invited accounts. 
+ self.committer = None + if sender_name is not None or sender_email is not None: + self.committer = giteapy.Identity(name = sender_name,email = sender_email) + + def __connect(self): + try: + assert(self.client) + except AttributeError: + # Configuration for the GITEA connection + configuration = giteapy.Configuration() + # Overrule the host url....? + configuration.host = self.url + #configuration.debug = False + configuration.api_key['access_token'] = self.password + + # Create the client + self.client = giteapy.RepositoryApi(giteapy.ApiClient(configuration)) + logger.info(f'Created Gitea connection to url: {self.url}') + + def file_exists(self, filepath): + self.__connect() + try: + self.client.repo_get_contents(self.username, self.repository, filepath) + return True + except ApiException: + return False + + def directory_exists(self, filepath): + self.__connect() + return self.file_exists(filepath) + + def _make_folder_action(self, path): + # On GitHub you cannot create empty directories. So this actions will always succeed + return True + + def _upload_file_action(self, source, destination): + self.__connect() + try: + with open(source,'rb') as datafile: + # This is a very big issue. 
Big files will be stored completely in memory :( + body = giteapy.CreateFileOptions(content = base64.b64encode(datafile.read()).decode(), + message = f'Upload from VRE DataDropOff\n Added file: {destination}', + committer = self.committer) + except Exception: + return False + + try: + # Create a file in a repository + api_response = self.client.repo_create_file(self.username, self.repository, destination, body) + return True + except ApiException as ex: + logger.exception(f'Exception when calling RepositoryApi->repo_create_file: {ex}') + + return True + + def _download_file_action(self, source, destination): + self.__connect() + with open(destination,'wb') as destination_file: + try: + data = self.client.repo_get_contents(self.username, self.repository, source) + destination_file.write(base64.b64decode(data.content)) + except ApiException as ex: + logger.exception(f'Exception when calling RepositoryApi->repo_get_contents: {ex}') + + return True \ No newline at end of file diff --git a/VRE/storage/engines/github.py b/VRE/storage/engines/github.py new file mode 100644 index 0000000..d48b94c --- /dev/null +++ b/VRE/storage/engines/github.py @@ -0,0 +1,66 @@ +import logging +logger = logging.getLogger(__name__) + +import os + +from storage.storage import BaseStorage + +# Github Support - https://pypi.org/project/PyGithub/ +from github import Github, InputGitAuthor, GithubObject +from github.GithubException import UnknownObjectException + +class GithubStorage(BaseStorage): + + TYPE = 'github' + + def __init__(self, url = None, username = None, password = None, source = None, destination = None, encryption_key = None, sender_name = None, sender_email = None): + # The repository is added to the url parameter. Use a '#' as seperator. The repository needs to be created first. 
+ # Ex: https://api.github.com/#RepositoryName + (url, self.repository) = url.split('#') + destination = destination.strip('/') + + super().__init__(url, username, password, source, destination, encryption_key, sender_name, sender_email) + + # Create a commiter object when the data is uploaded through one of the invited accounts. + self.committer = GithubObject.NotSet + if sender_name is not None or sender_email is not None: + self.committer = InputGitAuthor(name=sender_name, email=sender_email) + + def __connect(self): + try: + assert(self.repo) + except AttributeError: + client = Github(self.password) + self.repo = client.get_user().get_repo(self.repository) + logger.info('Created Github.com connection') + + def file_exists(self, filepath): + self.__connect() + try: + self.repo.get_contents(filepath) + return True + except UnknownObjectException: + return False + + def directory_exists(self, filepath): + return True + + def _make_folder_action(self, path): + # On GitHub you cannot create empty directories. So this actions will always succeed + return True + + def _upload_file_action(self, source, destination): + self.__connect() + # Read the file and post to Github. 
The library will convert to Base64 + with open(source,'rb') as datafile: + self.repo.create_file(destination.strip('/'),f'Upload from VRE DataDropOff\n Added file: {destination}', datafile.read(), committer = self.committer) + + return True + + def _download_file_action(self, source, destination): + self.__connect() + download = self.repo.get_contents(source) + with open(destination,'wb') as destination_file: + destination_file.write(download.decoded_content) + + return True \ No newline at end of file diff --git a/VRE/storage/engines/irods.py b/VRE/storage/engines/irods.py new file mode 100644 index 0000000..56bfa96 --- /dev/null +++ b/VRE/storage/engines/irods.py @@ -0,0 +1,138 @@ +import logging +logger = logging.getLogger(__name__) + +from storage.storage import BaseStorage +import storage.exceptions as StorageException + +# iRods support - https://pypi.org/project/python-irodsclient/ +import irods +from irods.session import iRODSSession +import atexit + +class iRODSStorage(BaseStorage): + + TYPE = 'irods' + + def __init__(self, url = None, username = None, password = None, source = None, destination = None, encryption_key = None, sender_name = None, sender_email = None): + # The iRODS zone is added to the url parameter. Use a '#' as seperator. This needs to be an Existing iRODS zone + # Ex: rdms-prod-icat.data.rug.nl#rug + (url, self.irods_zone) = url.split('#') + if destination: + destination = destination.strip('/') + + super().__init__(url, username, password, source, destination, encryption_key, sender_name, sender_email) + + # We need to clean up the iRODS session. Using atexit is the easiest way. + atexit.register(self.__close) + + def __connect(self): + try: + assert(self.client) + except AttributeError: + # Connect to the iRODS server + self.client = None + try: + self.client = iRODSSession(host=self.url, port=1247, user=self.username, password=self.password, zone=self.irods_zone) + # Need to make a call to validate the authentication. 
So by checking the version, we know if we can authenticate... + logger.debug(f'iRODS {self.client.server_version} connection through *native* authentication') + except irods.exception.CAT_INVALID_AUTHENTICATION: + # Authentication scheme is not native (default), so we try PAM here + try: + self.client = iRODSSession(host=self.url, port=1247, user=self.username, password=self.password, zone=self.irods_zone, irods_authentication_scheme='pam') + logger.debug(f'iRODS {self.client.server_version} connection through *PAM* authentication') + except irods.exception.CAT_INVALID_AUTHENTICATION: + # Authentication scheme is not PAM either last try: GIS + try: + self.client = iRODSSession(host=self.url, port=1247, user=self.username, password=self.password, zone=self.irods_zone, irods_authentication_scheme='gis') + logger.debug(f'iRODS {self.client.server_version} connection through *GIS* authentication') + except irods.exception.CAT_INVALID_AUTHENTICATION: + pass + + if self.client is None: + logger.error('Unable to login to the iRODS instance. 
Please check username and password combination!') + raise StorageException.InvalidAuthentication(self.username) + + logger.info('Created iRODS connection') + + def __close(self): + logger.debug('Closing iRODS storage connection and clean up') + self.client.cleanup() + + def _file_exists_action(self, path): + self.__connect() + try: + self.client.data_objects.get(f'/{self.irods_zone}/home/{self.username}/{path}') + except irods.exception.DataObjectDoesNotExist: + logger.debug(f'File \'{path}\' does NOT exists on the iRODS server') + return False + except irods.exception.CollectionDoesNotExist: + logger.debug(f'Parent folder of file \'{path}\' does NOT exists on the iRODS server') + return False + + return True + + def _directory_exists_action(self, path): + self.__connect() + try: + self.client.collections.get(f'/{self.irods_zone}/home/{self.username}/{path}') + logger.debug(f'Folder \'{path}\' exists on the iRODS server') + except irods.exception.CollectionDoesNotExist: + logger.debug(f'Folder \'{path}\' does NOT exists on the iRODS server') + return False + + return True + + def _make_folder_action(self, path): + self.__connect() + try: + self.client.collections.create(f'/{self.irods_zone}/home/{self.username}/{path}') + except irods.exception.CollectionDoesNotExist: + logger.debug(f'Parent folder of file \'{path}\' does NOT exists on the iRODS server') + return False + + return True + + def _upload_file_action(self, source, destination): + self.__connect() + # The upload path consists of a zone, username and path + destination = f'/{self.irods_zone}/home/{self.username}/{destination}' + logger.debug(f'Uploading to file: \'{destination}\'') + try: + obj = self.client.data_objects.create(destination) + logger.debug(f'Created file: \'{destination}\'') + # Open 'both' files and copy 4K data each time. 
+ with obj.open('w') as irods_file, open(source,'rb') as source_file_binary: + while True: + buf = source_file_binary.read(4096) + if buf: + irods_file.write(buf) + else: + break + + obj.metadata.add('source',f'Upload from VRE DataDropOff\n Added file: {destination} uploaded by: {self.sender_name}({self.sender_email})') + + except irods.exception.OVERWRITE_WITHOUT_FORCE_FLAG: + logger.warning('The uploaded file already exists. So we did NOT upload the new file!') + return False + + return True + + def _download_file_action(self, source, destination): + self.__connect() + logger.debug(f'Downloading file: \'{source}\' to \'{destination}\'') + try: + obj = self.client.data_objects.get(f'/{self.irods_zone}/home/{self.username}/{source}') + # Open 'both' files and copy 4K data each time. + with obj.open('r') as irods_source_file, open(destination,'wb') as local_destination_file: + while True: + buf = irods_source_file.read(4096) + if buf: + local_destination_file.write(buf) + else: + break + + except irods.exception.DataObjectDoesNotExist: + logger.error(f'File: \'{source}\' does not exists on the iRODS server') + return False + + return True \ No newline at end of file diff --git a/VRE/storage/engines/webdav.py b/VRE/storage/engines/webdav.py new file mode 100644 index 0000000..edc9df8 --- /dev/null +++ b/VRE/storage/engines/webdav.py @@ -0,0 +1,64 @@ +import logging +logger = logging.getLogger(__name__) + +from storage.storage import BaseStorage +from storage.utils import human_filesize +import storage.exceptions as StorageException + +# WebDAV Support - https://pypi.org/project/webdavclient3/ +from webdav3.client import Client +from webdav3.exceptions import WebDavException, ResponseErrorCode + +class WebDAVStorage(BaseStorage): + + TYPE = 'webdav' + + def __connect(self): + # Connect to the external storage. This function can be run multiple times. 
It will check if it has already a connection to re-use + try: + # When this fails with an Attribute error, that means that the 'client' variable is not set and we need to make a new connection + assert(self.client) + except AttributeError: + # Because the 'client' variable is not known, the WebDAV connections is not created yet. So do it now! + self.client = Client({ + 'webdav_hostname' : self.url, + 'webdav_login' : self.username, + 'webdav_password' : self.password, + }) + + try: + # Here we abuse the .free check to see if the login credentials do work + free_space = self.client.free() + logger.info(f'Created WebDAV connection to url: \'{self.url}\', with space left: {human_filesize(free_space)}') + except ResponseErrorCode as ex: + # Login went wrong, so delete the client variable for next run/try + del(self.client) + + # If there was an authentication error, raise exception and quit. + if 401 == ex.code: + raise StorageException.InvalidAuthentication(self.username) + + # TODO: More errors..... 
+ + def _file_exists_action(self, path): + self.__connect() + return self.client.check(path) + + def _directory_exists_action(self, path): + self.__connect() + return self.client.check(path) + + def _make_folder_action(self, path): + self.__connect() + self.client.mkdir(path) + return True + + def _upload_file_action(self, source, destination): + self.__connect() + self.client.upload(local_path = source, remote_path = destination) + return True + + def _download_file_action(self, source, destination): + self.__connect() + self.client.download(source,destination) + return True \ No newline at end of file diff --git a/VRE/storage/exceptions.py b/VRE/storage/exceptions.py new file mode 100644 index 0000000..4ca6914 --- /dev/null +++ b/VRE/storage/exceptions.py @@ -0,0 +1,57 @@ + +class BaseStorageError(Exception): + pass + +class StorageActionNotImplemented(Exception): + def __init__(self, storage, action, message='is not implemented'): + self.storage = storage + self.action = action + self.message = message + super().__init__(self.message) + + def __str__(self): + return f'{self.action} on class {self.storage} {self.message}' + +class FileDoesNotExist(BaseStorageError): + def __init__(self, source, message='File does not exists on disk'): + self.source = source + self.message = message + super().__init__(self.message) + + def __str__(self): + return f'{self.source} -> {self.message}' + +class InvalidLocation(BaseStorageError): + def __init__(self, location, message='Location does not exists or is not valid'): + self.location = location + self.message = message + super().__init__(self.message) + + def __str__(self): + return f'{self.location} -> {self.message}' + +class InvalidAuthentication(BaseStorageError): + def __init__(self, user, message='Authentication failed'): + self.user = user + self.message = message + super().__init__(self.message) + + def __str__(self): + return f'{self.user} -> {self.message}' + +class UnknownStorageEngine(BaseStorageError): + def 
__init__(self, engine, message='Storage engine is unknown, not available'): + self.engine = engine + self.message = message + super().__init__(self.message) + + def __str__(self): + return f'{self.engine} -> {self.message}' + +class MissingEncryptionKey(BaseStorageError): + def __init__(self, message='The encryption keys are missing'): + self.message = message + super().__init__(self.message) + + def __str__(self): + return f'{self.message}' \ No newline at end of file diff --git a/VRE/storage/storage.py b/VRE/storage/storage.py new file mode 100644 index 0000000..152d4d2 --- /dev/null +++ b/VRE/storage/storage.py @@ -0,0 +1,247 @@ +import logging +logger = logging.getLogger(__name__) + +import tempfile +from datetime import datetime +import subprocess, shlex +import shutil +import os +import glob +import re +from pathlib import Path +import importlib + +import storage.exceptions as StorageException + +class Storage(): + + CLASS_REGEX = re.compile(r'class\s+(?P[^\s\(]+)\s*\(\s*BaseStorage\s*\)\s*:') + + # This acts like a factory function. 
It will return a storage object from the requested engine + def __new__(self, storage_type, url = None, username = None, password = None, source = None, destination = None, encryption_key = None, sender_name = None, sender_email = None): + storage_type = storage_type.lower() + engines = Storage.__load_storage_engines() + logger.debug(f'Available storage engines({len(Storage.available_engines())}): {Storage.available_engines()}') + if storage_type not in engines: + raise StorageException.UnknownStorageEngine(storage_type) + + return engines[storage_type](url, username, password, source, destination, encryption_key, sender_name, sender_email) + + @staticmethod + def __load_storage_engines(): + loaded_engines = {} + engines = (Path(__file__)).parent.joinpath('engines') + + for engine in [x for x in engines.glob('*.py') if x.is_file()]: + with engine.open() as python_file: + data = python_file.read() + class_name = Storage.CLASS_REGEX.findall(data) + if len(class_name) == 1: + storage_engine_module = importlib.import_module('.{}' . format(engine.stem),package='storage.engines') + storage_engine_class = getattr(storage_engine_module, class_name[0]) + loaded_engines[storage_engine_class.TYPE.lower()] = storage_engine_class + + return loaded_engines + + @staticmethod + def available_engines(): + engines = list(Storage.__load_storage_engines().keys()) + engines.sort() + return engines + +class BaseStorage(): + + ENCFS_XML = '.encfs6.xml' + ENCRYPT_CMD = '/usr/bin/encfs' + FUSER_MOUNT = '/bin/fusermount' + + TYPE = '' + + def __init__(self, url = None, username = None, password = None, source = None, destination = None, encryption_key = None, sender_name = None, sender_email = None): + if source is not None and not os.path.exists(source): + logger.error(f'Source file is not available on disk! 
It has vanished from: {source}') + raise StorageException.FileDoesNotExist(source) + + # if destination is None: + # logger.error(f'Destination is not valid: {destination}') + # raise StorageException.InvalidLocation(destination) + + self.source = source + self.destination_dir = None if destination is None else os.path.dirname(destination) + self.destination_file = None if destination is None else os.path.basename(destination) + self.encryption_key = encryption_key + self.encrypted = False + + self.url = url + self.username = username + self.password = password + self.sender_name = sender_name + self.sender_email = sender_email + + def encrypt_source(self): + if self.encryption_key is None: + logger.error(f'Cannot encrypt source file {self.source} due to missing encryption key!') + raise StorageException.MissingEncryptionKey() + + if self.encrypted: + logger.warning('File is already encrypted') + return True + + start_time = datetime.now() + logger.info(f'Encrypting new uploaded file: {self.source}') + + encrypted_dir = tempfile.mkdtemp() + logger.debug(f'Created encrypted source folder: {encrypted_dir}') + + decoded_dir = tempfile.mkdtemp() + logger.debug(f'Created decoded folder: {decoded_dir}') + + new_encryption_setup = True + existing_encfs_file = os.path.join(self.destination_dir,BaseStorage.ENCFS_XML) + logger.debug(f'Check for existing encryption key file \'{existing_encfs_file}\' on the destination location.') + if self.file_exists(existing_encfs_file): + logger.debug(f'Copying existing \'{BaseStorage.ENCFS_XML}\' file...') + self.download_file(existing_encfs_file, os.path.join(encrypted_dir,BaseStorage.ENCFS_XML)) + logger.info(f'Using existing \'{existing_encfs_file}\' from destination location.') + new_encryption_setup = False + + # Mounting part between source and encrypted folder + # TODO: Check what happens when there are spaces in the dir names... 
need some quotes I guess + cmd = f'{BaseStorage.ENCRYPT_CMD} --standard -S {encrypted_dir} {decoded_dir}' + logger.debug(f'Creating an encrypted EncFS mount point with command: {cmd}') + + process = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE) + # # Send encryption password + logger.debug('Mounting in action. Sending encryption key...') + (output,error) = process.communicate(input=self.encryption_key.encode()) + + if process.wait(timeout=30) != 0: + output = output.decode().strip() + logger.error(f'Error creating an encrypted mount with EncFS. Error: \'{output}\'') + raise RuntimeError(f'Mounting error EncFS: {output}') + + logger.debug(f'Mountpoint is ready at path: {decoded_dir}') + + if new_encryption_setup: + logger.info(f'We have a new \'{BaseStorage.ENCFS_XML}\' file that needs to be moved to the same destination: {self.destination_dir}') + self.upload_file(os.path.join(encrypted_dir,BaseStorage.ENCFS_XML), existing_encfs_file, True) + + # Here we ignore the subdirectories on the destination. This will be fixed during the upload + destination_file = os.path.join(decoded_dir,self.destination_dir, self.destination_file) + logger.debug(f'Moving source file \'{self.source}\' to \'{destination_file}\' for encryption.') + os.makedirs(os.path.dirname(destination_file)) + shutil.move(self.source,destination_file) + + # Here we umount the decoded directory, so we only have the encypted data left + logger.debug(f'Encrypting is done, un-mounting decoded folder: {decoded_dir}') + cmd = f'{BaseStorage.FUSER_MOUNT} -u {decoded_dir}' + logger.debug(f'Umounting cmd: {cmd}') + process = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE) + if process.wait() != 0: + # TODO: Better error handling... 
Add raise exception + logger.error(f'Error un-mounting mount point: {decoded_dir}') + raise RuntimeError(f'Un-mounting error EncFS: {process}') + + logger.debug(f'Cleanup temporary decoded dir: {decoded_dir}') + shutil.rmtree(decoded_dir) + + # Find the newly created encrypted file and move it back to the original source file + # We use the glob function so we can also support subdirectories in the encrypted storage + logger.debug(f'Finding newly created encrypted file in the encrypted source folder: {encrypted_dir}') + encrypted_listing = glob.glob(f'{encrypted_dir}/**',recursive=True) + logger.debug(f'Found encrypted file: {encrypted_listing[-1]}') + + # Source file is been changed to the new encrypted file name. So use that for the file upload process + self.source = os.path.join(os.path.dirname(self.source),os.path.basename(encrypted_listing[-1])) + self.destination_file = os.path.basename(self.source) + logger.debug(f'Moving encrypted file {encrypted_listing[-1]} back to original file: {self.source}') + logger.debug(f'Updated the destination file name based on the encrypted name: {self.destination_file}') + shutil.move(encrypted_listing[-1],self.source) + logger.info(f'Encrypted to \'{self.source}\' in {datetime.now() - start_time} (h:mm:ss.ms)') + + self.encrypted = True + return True + + def file_exists(self, path): + logger.debug(f'Check if file exists at path: \'{path}\' with engine: \'{self.TYPE}\'') + file_exists = self._file_exists_action(path) + exists = 'exist' if file_exists else 'does not exist' + logger.debug(f'File \'{path}\' {exists} on storage \'{self.TYPE}\'') + return file_exists + + def upload_file(self, source = None, destination = None, move = False): + source = self.source if source is None else source + destination = os.path.join(self.destination_dir,self.destination_file) if destination is None else destination + upload_ok = None + + if source is None or destination is None: + logger.error(f'Error uploading file. 
Either source: \'{source}\' or destination: \'{destination}\' is not set!') + + start_time = datetime.now() + logger.debug(f'Start uploading file: \'{source}\' to: \'{destination}\' with engine: \'{self.TYPE}\'') + + if not self.directory_exists(os.path.dirname(destination)): + self.create_directories(os.path.dirname(destination)) + + upload_ok = self._upload_file_action(source,destination) + if upload_ok: + logger.info(f'Uploaded \'{source}\' to: \'{destination}\' with engine: \'{self.TYPE}\' in {datetime.now() - start_time} (h:mm:ss.ms)') + if move or self.encrypted: + os.unlink(source) + logger.debug('Removed source file from disk!') + else: + logger.error(f'Error uploading \'{source}\' to: \'{destination}\' with engine: \'{self.TYPE}\' in {datetime.now() - start_time} (h:mm:ss.ms)') + + return upload_ok + + def directory_exists(self, path): + #logger.debug() + return self._directory_exists_action(path) + + def download_file(self, source = None, destination = None, move = False): + source = self.source if source is None else source + #destination = self.destination if destination is None else destination + destination = os.path.join(self.destination_dir,os.path.basename(self.destination_file)) if destination is None else destination + download_ok = None + + if source is None or destination is None: + logger.error(f'Error downloading file. Either source: {source} or destination: {destination} is not set!') + + start_time = datetime.now() + logger.debug('Downloading file: {source} to: {destination}') + download_ok = self._download_file_action(source,destination) + + if download_ok: + logger.info(f'Downloaded \'{source}\' to: \'{destination}\' in {datetime.now() - start_time} (h:mm:ss.ms)') + else: + logger.error(f'Downloading failed for \'{source}\' to: \'{destination}\' in {datetime.now() - start_time} (h:mm:ss.ms)') + + return download_ok + + def create_directories(self, path): + folders = [] + for folder in path.strip('/').split('/'): + # Store travelled path. 
We need this to make the directories on the remote servers + folders.append(folder) + if not self.directory_exists('/'.join(folders)): + logger.debug(f'Creating folder {folder} with full path: {"/".join(folders)}') + self._make_folder_action('/'.join(folders)) + else: + logger.debug(f'Folder \'{folder}\' already exists.') + + return True + + def _file_exists_action(self, path): + raise StorageException.StorageActionNotImplemented('BaseStorage','file_exists') + + def _directory_exists_action(self, path): + raise StorageException.StorageActionNotImplemented('BaseStorage','directory_exists') + + def _upload_file_action(self, source, destination): + raise StorageException.StorageActionNotImplemented('BaseStorage','_upload_file') + + def _download_file_action(self, source, destination): + raise StorageException.StorageActionNotImplemented('BaseStorage','_download_file') + + def _make_folder_action(self, path): + raise StorageException.StorageActionNotImplemented('BaseStorage','_make_folder_action') \ No newline at end of file diff --git a/VRE/storage/utils.py b/VRE/storage/utils.py new file mode 100644 index 0000000..b1349a7 --- /dev/null +++ b/VRE/storage/utils.py @@ -0,0 +1,8 @@ +def human_filesize(nbytes): + suffixes = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB'] + i = 0 + while nbytes >= 1024 and i < len(suffixes)-1: + nbytes /= 1024. 
+ i += 1 + f = ('%.2f' % nbytes).rstrip('0').rstrip('.') + return '%s %s' % (f, suffixes[i]) \ No newline at end of file diff --git a/clouds.yaml.example b/clouds.yaml.example new file mode 100644 index 0000000..d471f70 --- /dev/null +++ b/clouds.yaml.example @@ -0,0 +1,14 @@ +clouds: + hpc: + auth: + auth_url: [API_URL] + username: "[API_USERNAME]" + password: "[API_PASSWORD]" + project_id: [PROJECT_ID] + project_name: "vre" + user_domain_name: "Default" + + region_name: "RegionOne" + interface: "public" + identity_api_version: 3 + \ No newline at end of file diff --git a/doc/API.rst b/doc/API.rst new file mode 100644 index 0000000..7ccd198 --- /dev/null +++ b/doc/API.rst @@ -0,0 +1,8 @@ +===== +API +===== + +The API can also be found at the location /api/swagger/ /api/redoc/. + +.. openapi:: swagger.yaml + diff --git a/doc/Makefile b/doc/Makefile new file mode 100644 index 0000000..1c19fc3 --- /dev/null +++ b/doc/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = output + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
+%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/doc/_images/Gitea_Generate_API_Token.png b/doc/_images/Gitea_Generate_API_Token.png new file mode 100644 index 0000000..d83c9e7 Binary files /dev/null and b/doc/_images/Gitea_Generate_API_Token.png differ diff --git a/doc/_images/Gitea_Generate_API_Token_Done.png b/doc/_images/Gitea_Generate_API_Token_Done.png new file mode 100644 index 0000000..8e8d078 Binary files /dev/null and b/doc/_images/Gitea_Generate_API_Token_Done.png differ diff --git a/doc/_images/Github_Generate_API_Token_Done.png b/doc/_images/Github_Generate_API_Token_Done.png new file mode 100644 index 0000000..be7f08f Binary files /dev/null and b/doc/_images/Github_Generate_API_Token_Done.png differ diff --git a/doc/_images/Github_Generate_API_Token_Step1.png b/doc/_images/Github_Generate_API_Token_Step1.png new file mode 100644 index 0000000..73e3191 Binary files /dev/null and b/doc/_images/Github_Generate_API_Token_Step1.png differ diff --git a/doc/_images/Github_Generate_API_Token_Step2.png b/doc/_images/Github_Generate_API_Token_Step2.png new file mode 100644 index 0000000..0b5af42 Binary files /dev/null and b/doc/_images/Github_Generate_API_Token_Step2.png differ diff --git a/doc/_images/Github_Generate_API_Token_Step3.png b/doc/_images/Github_Generate_API_Token_Step3.png new file mode 100644 index 0000000..4b40eb6 Binary files /dev/null and b/doc/_images/Github_Generate_API_Token_Step3.png differ diff --git a/doc/_images/Github_Generate_API_Token_Step4.png b/doc/_images/Github_Generate_API_Token_Step4.png new file mode 100644 index 0000000..e538c4b Binary files /dev/null and b/doc/_images/Github_Generate_API_Token_Step4.png differ diff --git a/doc/_images/Owncloud_Generate_API_Token.png b/doc/_images/Owncloud_Generate_API_Token.png new file mode 100644 index 0000000..44ca4fe Binary files /dev/null and b/doc/_images/Owncloud_Generate_API_Token.png differ diff --git 
a/doc/_images/Owncloud_Generate_API_Token_Done.png b/doc/_images/Owncloud_Generate_API_Token_Done.png new file mode 100644 index 0000000..beeef77 Binary files /dev/null and b/doc/_images/Owncloud_Generate_API_Token_Done.png differ diff --git a/doc/_images/Owncloud_WebDAV_Location.png b/doc/_images/Owncloud_WebDAV_Location.png new file mode 100644 index 0000000..7805da9 Binary files /dev/null and b/doc/_images/Owncloud_WebDAV_Location.png differ diff --git a/doc/_static/RUG_Logo.jpg b/doc/_static/RUG_Logo.jpg new file mode 100644 index 0000000..da3037b Binary files /dev/null and b/doc/_static/RUG_Logo.jpg differ diff --git a/doc/_static/custom.css b/doc/_static/custom.css new file mode 100644 index 0000000..2852270 --- /dev/null +++ b/doc/_static/custom.css @@ -0,0 +1,10 @@ +/* override table width restrictions as found on https://github.com/getpelican/pelican/issues/1311 */ +.wy-table-responsive table td, .wy-table-responsive table th { + /* !important prevents the common CSS stylesheets from + overriding this as on RTD they are loaded after this stylesheet */ + white-space: normal !important; +} + +.wy-table-responsive { + overflow: visible !important; +} \ No newline at end of file diff --git a/doc/authentication.rst b/doc/authentication.rst new file mode 100644 index 0000000..ed4f083 --- /dev/null +++ b/doc/authentication.rst @@ -0,0 +1,14 @@ +============== +Authentication +============== + +--- +Web +--- +Normal web login + +--- +API +--- +.. automodule:: apps.api.authentication + :members: \ No newline at end of file diff --git a/doc/conf.py b/doc/conf.py new file mode 100644 index 0000000..8096a44 --- /dev/null +++ b/doc/conf.py @@ -0,0 +1,242 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. 
For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys +sys.path.insert(0, os.path.abspath('../tusd/hooks')) + +# Django autodoc +sys.path.insert(0, os.path.abspath('../VRE')) +os.environ['DJANGO_SETTINGS_MODULE'] = 'VRE.settings' +import django +django.setup() + +# -- Project information ----------------------------------------------------- + +project = 'Virtual Research Environment' +copyright = '2020-2021, Joshua Rubingh, Elwin Buisman' +author = 'Joshua Rubingh, Elwin Buisman' + +# The master toctree document. +master_doc = 'index' + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.napoleon', + 'sphinx.ext.autodoc', + 'sphinx.ext.viewcode', + 'sphinx.ext.coverage', + 'sphinx_markdown_builder', + 'sphinxcontrib.openapi', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store','build/*'] + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. 
+# +html_theme = 'alabaster' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +html_theme_options = { + 'logo': 'RUG_Logo.jpg', + 'logo_name' : True +} + +# -- Options for LaTeX output --------------------------------------------- +# Install Ubuntu/Debian package(s): texlive-latex-recommended, texlive-fonts-recommended, texlive-latex-extra, netpbm +latex_engine = 'pdflatex' +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + 'papersize': 'a4paper', + 'releasename':" ", + # Sonny, Lenny, Glenn, Conny, Rejne, Bjarne and Bjornstrup + # 'fncychap': '\\usepackage[Lenny]{fncychap}', + 'fncychap': '\\usepackage{fncychap}', + 'fontpkg': '\\usepackage{amsmath,amsfonts,amssymb,amsthm}', + + 'figure_align':'htbp', + # The font size ('10pt', '11pt' or '12pt'). + # + 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + 'preamble': r''' + %%%%%%%%%%%%%%%%%%%% Meher %%%%%%%%%%%%%%%%%% + %%%add number to subsubsection 2=subsection, 3=subsubsection + %%% below subsubsection is not good idea. + \setcounter{secnumdepth}{3} + % + %%%% Table of content upto 2=subsection, 3=subsubsection + \setcounter{tocdepth}{1} + + \usepackage{amsmath,amsfonts,amssymb,amsthm} + \usepackage{graphicx} + + %%% reduce spaces for Table of contents, figures and tables + %%% it is used "\addtocontents{toc}{\vskip -1.2cm}" etc. 
in the document + \usepackage[notlot,nottoc,notlof]{} + + \usepackage{color} + \usepackage{transparent} + \usepackage{eso-pic} + \usepackage{lipsum} + + \usepackage{footnotebackref} %%link at the footnote to go to the place of footnote in the text + + %% spacing between line + \usepackage{setspace} + %%%%\onehalfspacing + %%%%\doublespacing + \singlespacing + + + %%%%%%%%%%% datetime + \usepackage{datetime} + + \newdateformat{MonthYearFormat}{% + \monthname[\THEMONTH], \THEYEAR} + + + %% RO, LE will not work for 'oneside' layout. + %% Change oneside to twoside in document class + \usepackage{fancyhdr} + \pagestyle{fancy} + \fancyhf{} + + %%% Alternating Header for oneside + \fancyhead[L]{\ifthenelse{\isodd{\value{page}}}{ \small \nouppercase{\leftmark} }{}} + \fancyhead[R]{\ifthenelse{\isodd{\value{page}}}{}{ \small \nouppercase{\rightmark} }} + + %%% Alternating Header for two side + %\fancyhead[RO]{\small \nouppercase{\rightmark}} + %\fancyhead[LE]{\small \nouppercase{\leftmark}} + + %% for oneside: change footer at right side. If you want to use Left and right then use same as header defined above. 
+ %% \fancyfoot[R]{\ifthenelse{\isodd{\value{page}}}{{\tiny Meher Krishna Patel} }{\href{http://pythondsp.readthedocs.io/en/latest/pythondsp/toc.html}{\tiny PythonDSP}}} + + %%% Alternating Footer for two side + %% %\fancyfoot[RO, RE]{\scriptsize Meher Krishna Patel (mekrip@gmail.com)} + + %%% page number + \fancyfoot[CO, CE]{\thepage} + + \renewcommand{\headrulewidth}{0.5pt} + \renewcommand{\footrulewidth}{0.5pt} + + \RequirePackage{tocbibind} %%% comment this to remove page number for following + \addto\captionsenglish{\renewcommand{\contentsname}{Table of contents}} + %% \addto\captionsenglish{\renewcommand{\listfigurename}{List of figures}} + %% \addto\captionsenglish{\renewcommand{\listtablename}{List of tables}} + %% % \addto\captionsenglish{\renewcommand{\chaptername}{Chapter}} + + + %%reduce spacing for itemize + \usepackage{enumitem} + \setlist{nosep} + + %%%%%%%%%%% Quote Styles at the top of chapter + %% \usepackage{epigraph} + %% \setlength{\epigraphwidth}{0.8\columnwidth} + %% \newcommand{\chapterquote}[2]{\epigraphhead[60]{\epigraph{\textit{#1}}{\textbf {\textit{--#2}}}}} + %%%%%%%%%%% Quote for all places except Chapter + %% \newcommand{\sectionquote}[2]{{\quote{\textit{``#1''}}{\textbf {\textit{--#2}}}}} + ''', + + + 'maketitle': r''' + \pagenumbering{Roman} %%% to avoid page 1 conflict with actual page 1 + + \begin{titlepage} + + \begingroup % for PDF information dictionary + \def\endgraf{ }\def\and{\& }% + \pdfstringdefDisableCommands{\def\\{, }}% overwrite hyperref setup + \hypersetup{pdfauthor={\@author}, pdftitle={\@title}}% + \endgroup + + \centering + + \vspace*{40mm} %%% * is used to give space from top + \textbf{\Huge {RUG Virtual Research Environment (VRE)} } + + \vspace{0mm} + \begin{figure}[!h] + \centering + \includegraphics[scale=0.4]{RUG_Logo.jpg} + \end{figure} + + \vspace{100mm} + \Large \textbf{{Joshua Rubingh, Elwin Buisman}} + + + \small Created on : February, 2020 + + \vspace*{0mm} + \small Last updated : \MonthYearFormat\today 
+ + + %% \vfill adds at the bottom + \vfill + %% \small \textit{More documents are freely available at }{\href{http://pythondsp.readthedocs.io/en/latest/pythondsp/toc.html}{PythonDSP}} + \end{titlepage} + + \clearpage + \pagenumbering{roman} + \tableofcontents + \listoffigures + \listoftables + \clearpage + \pagenumbering{arabic} + ''', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', + 'sphinxsetup': \ + 'hmargin={0.7in,0.7in}, vmargin={1in,1in}, \ + verbatimwithframe=true, \ + TitleColor={rgb}{0,0,0}, \ + HeaderFamily=\\rmfamily\\bfseries, \ + InnerLinkColor={rgb}{0,0,1}, \ + OuterLinkColor={rgb}{0,0,1}', + + 'tableofcontents':' ', +} + +latex_logo = '_static/RUG_Logo.jpg' + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'documentation.tex', project, + author, 'report') +] \ No newline at end of file diff --git a/doc/development.rst b/doc/development.rst new file mode 100644 index 0000000..2c7b941 --- /dev/null +++ b/doc/development.rst @@ -0,0 +1,239 @@ +=========== +Development +=========== + +In order to develop on this software, you can setup a development environment using these steps. + + +In order to install this Virtual Research Environment project, we use the following packages / software. + +* Redis +* Django +* NGINX +* TUSD (The Upload Server Daemon) +* Demo portal + +First we need to checkout the code. + +.. code-block:: bash + + git clone https://git.web.rug.nl/VRE/data_drop-off.git /opt/deploy/VRE + +----- +Redis +----- + +Redis is used for storing the schedule/background actions. For development we use the default Redis setup **without** authentication. Install Redis with the default package manager. For Debian based: + +.. code-block:: console + + sudo apt install redis-server + +------ +Django +------ + +The Django code consists of three parts. 
There is a REST API server, a background scheduler and a demo portal. For development we use all three parts. They all work from the same Python3 virtual environment. + +Common +====== + +First we need to create the Python virtual environment. This is done with Python 3. + +.. code-block:: console + + python3 -m venv venv + +This will give us a virtual python environment on the location *venv* in the root of the code dir. Next we need to install the required libraries + +.. code-block:: bash + + source venv/bin/activate + pip install -r VRE/requirements.txt + pip install -r demo_portal/requirements.txt + pip install -r tusd/hooks/requirements.txt + + +REST API +======== + +Out of the box the REST API server only needs two required settings to work. These settings needs to be placed in a .env file located in the VRE/VRE folder. There should be an .env.example file which you can use as a template. + +The minimal settings that needs to be set are: + +* **SECRET_KEY**: A uniquely secret key. Used for cookie/session encryption +* **DEBUG**: Enable debug + +Then we can setup and start the REST API server with the following commands. + +.. code-block:: bash + + source venv/bin/activate + ./VRE/manage.py migrate + ./VRE/manage.py loaddata virtual_machine_initial_data + ./VRE/manage.py createsuperuser + +And start with: + +.. code-block:: bash + + source venv/bin/activate + ./VRE/manage.py runserver 0.0.0.0:1337 + +Now you can access your REST API server documentation on http://localhost:1337/api/redoc/ and the admin at http://localhost:1337/admin/ + +There are more settings available to setup. These can be added the to .env file of the REST API. + +.. literalinclude:: ../VRE/VRE/.env.example + :language: bash + +Scheduler +========= + +The scheduler is used for background tasks such as creating new workspaces or other long taking actions. The scheduler needs the same python3 environment as the REST API. 
So here we asume that the Python3 virtual environment is setup correctly. + +.. code-block:: bash + + source venv/bin/activate + ./VRE/manage.py run_huey + + +Users +===== + +We also need a TUSD user for API communication between the REST API and the TUSD server. So we create a new user in the REST API admin. Go to http://localhost:1337/admin/auth/user/add/ and create a new user. When the user is created go to the API tokens and select the token for the TUSD user. We need the *key* and *secret* of the TUSD user for later use. +make sure the TUSD user has the **superuser** status. This is needed. + +----- +NGINX +----- + +NGINX is used on multiple places on the project. This means that we will create multiple virtual domains to get everthing working correctly. + +**We do not cover SSL setups in this document** + +Common +====== + +First install NGINX with LUA support through the package manager. For Debian based this would be: + +.. code-block:: console + + sudo apt install nginx libnginx-mod-http-lua + +TUSD +==== + +LUA +--- +There is usage of LUA in NGINX so we can handle some dynamic data on the server side. All LUA code should be placed in the folder `/etc/nginx/lua`. + +.. code-block:: console + + sudo ln -s /opt/deploy/VRE/nginx/lua /etc/nginx/lua + + +VHost +----- +After installation of the packages, create a symbolic link in the `/etc/nginx/sites-enabled` so that a new VHost is created. + +Important parts of the VHost configuration: + +.. literalinclude:: ../nginx/tus.vhost.conf + :language: bash + +And there should be a `lua` folder in the `/etc/nginx` folder. This can be a symbolic link to the LUA folder that is provided with this project. + +In order to test if NGINX is configured correctly run `nginx -t` and it should give an OK message: + +.. code-block:: bash + + nginx: the configuration file /etc/nginx/nginx.conf syntax is ok + nginx: configuration file /etc/nginx/nginx.conf test is successful + + +--- +TUS +--- +TUS = `The Upload Server `_. 
This is a resumable upload server that speaks HTTP. This server is a stand-alone server that is running behind the NGINX server. This is needed as NGINX is manipulating the headers so extra information is added to the uploads. + +It is even possible to run a TUS instance on a different location (Amsterdam). As long as the TUS is reachable by the NGINX frontend server, and the TUS server can post webhooks back to the REST API server. + +Setup +===== + +The services is started with a simple bash script. This makes sure that all settings are loaded and the right parameters are used with the TUSD Go daemon server. + +The daemon needs to know the following information. These settings are required: + +* **WEBHOOK_URL**: This is the full url to the REST API server to post updates during uploads. +* **DROPOFF_API_HAWK_KEY**: The key for the token that is created on the REST API server for communication with the REST API server. +* **DROPOFF_API_HAWK_SECRET**: The secret value that belongs to the token *DROPOFF_API_HAWK_KEY*. + +This information can be placed in an .env file in the same folder where the startup (*startup.sh*) script is located. An example .env file: + +.. literalinclude:: ../tusd/.env.example + :language: bash + +In the startup.sh script there are some default variables that can be overwritten by adding them to the .env file above. + +.. literalinclude:: ../tusd/startup.sh + :language: bash + :lines: 5-16 + +This will start the TUS server running on TCP port 1080. + +Data storage +------------ +The upload data is stored at a folder that is configured in the TUS startup command. This should be folder that is writable by the user that is running the TUS instance. **Make sure that the upload folder is not directly accessible by the webserver**. Else files can be downloaded. + +Hooks +----- +The TUS is capable of handling hooks based on uploaded files. There are two types of hooks. 'Normal' hooks and webhooks. 
It is not possible to run both hook systems at the same time due to the blocking nature of the pre-create hook. So we use the 'normal' hook system. That means that custom scripts are run. Those scripts can then post the data to a webserver in order to get a Webhook functionality with the 'normal' hooks. +At the moment, there is only a HTTP webcall done in the hook system. There is no actual file movement yet. +For now we have used the following hooks: + +- **pre-create**: This hook will run when a new upload starts. This will trigger the REST API server to store the upload in the database, and check if the upload is allowed based on an unique upload url and unique upload code. +- **post-finish**: This hook will run when an upload is finished. And will update the REST API server with the file size and actual filename (unique) on disk. + +An example of a hook as used in this project is the *pre-create.py* script. + +.. literalinclude:: ../tusd/hooks/pre-create.py + +This hook uses the same data payload as when TUS would use the Webhook system. So using 'Normal' hooks or using Webhooks with REST API Server should both work out of the box. + +----------- +Demo portal +----------- + +In order to test the REST API and be able to give a demo, there is a demo portal that can be used. + +Out of the box the Demo portal only needs a few required settings to work. These settings needs to be placed in a .env file located in the demo_portal/demo_portal folder. There should be an .env.example file which you can use as a template. + +The minimal settings that needs to be set are: + +* **SECRET_KEY**: A uniquely secret key. Used for cookie/session encryption +* **DEBUG**: Enable debug +* **DROPOFF_API_USER_KEY**: The key for the token that is created on the REST API server for communication with the REST API server. +* **DROPOFF_API_USER_SECRET**: The secret value that belongs to the token *DROPOFF_API_USER_KEY*. 
toctree::
+   :caption: Table of Contents
+   :maxdepth: 2
+
+   install
+   storage
+   models
+   API
+   authentication
+   signals
+   tus
+   development
+
+------------------
+Indices and tables
+------------------
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search` \ No newline at end of file diff --git a/doc/install.rst b/doc/install.rst new file mode 100644 index 0000000..9a02fb5 --- /dev/null +++ b/doc/install.rst @@ -0,0 +1,64 @@ +============ +Installation +============ +
+The Virtual Research Environment will be installed with a docker compose setup. This way the whole platform is up and running within minutes.
+
+First we need to check out the code.
+
+.. code-block:: bash
+
+    git clone https://git.web.rug.nl/VRE/data_drop-off.git /opt/deploy/VRE
+
+------
+Docker
+------
+
+Make sure you have Docker and Docker-Compose installed. On Debian based systems you can install it with the following setup: https://docs.docker.com/engine/install/debian/
+
+Run the following commands and a complete production setup is created.
+
+.. code-block:: bash
+
+    cd /opt/deploy/VRE
+    docker-compose up
+
+The docker setup is created according to the docker-compose.yaml in the root directory. This will create its own virtual network and some persistent storage volumes.
+
+The following persistent volumes are created:
+
+* Postgres data. This is used for Postgres database files
+* Redis data. This is used for Redis storage
+* TUSD data. This is used for the temporary file uploads that are being processed
+* Static files data. 
This is used for Django static files served by NGINX
+
+
+During installation the following docker containers will be created:
+
+* Postgres DB server
+* Redis DB server
+* The Upload Service Daemon server (TUSD)
+* Django REST API server
+* Django background scheduler server
+* NGINX TUSD frontend server
+* NGINX API frontend server
+
+
+And there will be two extra docker containers running a demo site to communicate with the REST API:
+
+* Django demo portal server
+* NGINX demo frontend server
+
+Only the NGINX containers have a connection with the outside world. By default you have:
+
+* REST API and admin on http://localhost:1337/api/redoc/ and http://localhost:1337/api/admin/
+* TUSD Upload server on http://localhost:1080/files
+* Django demo portal on http://localhost:8080
+
+Settings
+========
+
+You can change the Docker setup by changing the settings in the file docker/project.env. Every setting has some explanation of what it does or what it is for.
+
+.. literalinclude:: ../docker/project.env
+   :language: bash \ No newline at end of file diff --git a/doc/make.bat b/doc/make.bat new file mode 100644 index 0000000..2119f51 --- /dev/null +++ b/doc/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=.
+set BUILDDIR=_build
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+	echo.
+	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+	echo.installed, then set the SPHINXBUILD environment variable to point
+	echo.to the full path of the 'sphinx-build' executable. Alternatively you
+	echo.may add the Sphinx directory to PATH.
+	echo. 
+ echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/doc/models.rst b/doc/models.rst new file mode 100644 index 0000000..0f66485 --- /dev/null +++ b/doc/models.rst @@ -0,0 +1,51 @@ +====== +Models +====== + +---- +Base +---- +.. automodule:: lib.models.base + :members: + +------- +Dropoff +------- +.. automodule:: apps.dropoff.models + :members: + +---------- +Invitation +---------- +.. automodule:: apps.invitation.models + :members: + +---------- +Researcher +---------- +.. automodule:: apps.researcher.models + :members: + +------- +Storage +------- +.. automodule:: apps.storage.models + :members: + +----- +Study +----- +.. automodule:: apps.study.models + :members: + +--------------- +Virtual Machine +--------------- +.. automodule:: apps.virtual_machine.models + :members: + +----- +Token +----- +.. automodule:: apps.api.models + :members: \ No newline at end of file diff --git a/doc/requirements.txt b/doc/requirements.txt new file mode 100644 index 0000000..58cf04b --- /dev/null +++ b/doc/requirements.txt @@ -0,0 +1,3 @@ +Sphinx==3.5.1 +sphinx-markdown-builder==0.5.4 +sphinxcontrib-openapi==0.7.0 \ No newline at end of file diff --git a/doc/signals.rst b/doc/signals.rst new file mode 100644 index 0000000..13d6f18 --- /dev/null +++ b/doc/signals.rst @@ -0,0 +1,21 @@ +======= +Signals +======= + +--- +API +--- +.. automodule:: apps.api.signals + :members: + +---------- +Invitation +---------- +.. automodule:: apps.invitation.signals + :members: + +---------- +Researcher +---------- +.. 
automodule:: apps.researcher.signals + :members: diff --git a/doc/storage.rst b/doc/storage.rst new file mode 100644 index 0000000..11892e1 --- /dev/null +++ b/doc/storage.rst @@ -0,0 +1,187 @@ +======== +Storages +======== +Here you can read which storage engines are supported. And how they can be used. Per storage engine there is a short description how to setup. + +Per storage engine the following options needs to be specified + +- **location**: Full path of the storage engine or API +- **username**: The username that is able to connect to the storage +- **password**: The password that is needed to connect to the storage + +When we connect a storage to a study, the options to set a storage path and encryption will become available. So you are able to reuse the same storage configuration for multiple studies, but per combination storage/study you can specify storage paths, encryption, notification etc + +----- +Gitea +----- + +In order to use Gitea as a storage, you need to create an 'Application' in Gitea. This will create a new token that is used for authenticating with the API. + +Creating access token +===================== + +Login into the Gitea server with a webrowser and go to your 'Applications' in your 'Settings'. + +.. figure:: _images/Gitea_Generate_API_Token.png + :width: 100% + :align: center + :alt: Gitea API Token generating + +1. Click on you profile menu +2. Click on 'Settings' +3. Click on 'Applications' +4. Enter a name for this Application. + +Then press the 'Generate token' button and a new token will be generated with the new Application. + +.. figure:: _images/Gitea_Generate_API_Token_Done.png + :width: 100% + :align: center + :alt: Gitea API Token generating done + +5. Store the token somewhere save +6. Here is your newly created Application with the latest activity. This will show you the last time this Application is used. +7. Here you can delete the Application. This will revoke the token, and will block new API calls and uploads. 
+
+VRE Storage values
+==================
+
+Now that we have created a Gitea API Application, we can use it in the VRE Dropoff Storage settings.
+
+- **location**: https://\[gitea.host.com\]/api/v1#\[Your_Repository_Name\] (It is important to add '/api/v1' to the url)
+- **username**: Your own username
+- **password**: The saved Application token at step 5
+
+------
+Github
+------
+
+In order to use Github as a storage, you need to create a personal access token in Github. This token is used for authenticating with the API.
+
+Creating access token
+=====================
+
+Log in to Github with a web browser and follow the steps below to make an API token.
+
+.. figure:: _images/Github_Generate_API_Token_Step1.png
+   :width: 100%
+   :align: center
+   :alt: Github API Token generating
+
+1. Click on your profile menu
+2. Click on 'Settings'
+3. Click on 'Developer settings'
+
+.. figure:: _images/Github_Generate_API_Token_Step2.png
+   :width: 100%
+   :align: center
+   :alt: Github API Token generating done
+
+4. Click on 'Personal access tokens'
+
+.. figure:: _images/Github_Generate_API_Token_Step3.png
+   :width: 100%
+   :align: center
+   :alt: Github API Token generating done
+
+5. Click on 'Generate new token'
+
+.. figure:: _images/Github_Generate_API_Token_Step4.png
+   :width: 100%
+   :align: center
+   :alt: Github API Token generating done
+
+6. Enter a name
+7. Select the **Repo** checkbox.
+
+.. figure:: _images/Github_Generate_API_Token_Done.png
+   :width: 100%
+   :align: center
+   :alt: Github API Token generating done
+
+8. Store the token somewhere safe
+9. Here is your newly created token with the latest activity. This will show you the last time this token was used.
+10. Here you can delete the token. This will revoke it, and will block new API calls and uploads.
+
+VRE Storage values
+==================
+
+Now that we have created a Github.com API token, we can use it in the VRE Dropoff Storage settings. 
+ +- **location**: https://api.github.com#\[Your_Repository_Name\] +- **username**: Your own username +- **password**: The saved Application token at step 8 + +------ +WebDAV +------ + +In order to use a WebDAV server as a storage there are different ways of creating API tokens and/or authentication. So for WebDAV we have steps per working known WebDAV servers. + +Owncloud/Nexcloud/Unishare +========================== + +Creating access token +--------------------- + +Login into the Owncloud/Nexcloud/Unishare server with a webrowser and go to your 'Security' in your 'Settings'. + +.. figure:: _images/Owncloud_Generate_API_Token.png + :width: 100% + :align: center + :alt: Owncloud/Nexcloud/Unishare API Token generating + +1. Click on you profile menu +2. Click on 'Settings' +3. Click on 'Security' +4. Enter a name for this Application. +5. Press the 'Generate token' button and a new token will be generated with the new Application. + + +.. figure:: _images/Owncloud_Generate_API_Token_Done.png + :width: 100% + :align: center + :alt: Owncloud/Nexcloud/Unishare API Token generating done + +6. Store the token somewhere save +7. Here is your newly created Application with the latest activity. This will show you the last time this Application is used. +8. Here you can delete the Application. This will revoke the token, and will block new API calls and uploads. + +VRE Storage values +------------------ + +.. figure:: _images/Owncloud_WebDAV_Location.png + :width: 200px + :align: right + :alt: Owncloud/Nexcloud/Unishare WebDAV url + +Now we have created an Owncloud/Nexcloud/Unishare API Application we can use this in the VRE Dropoff Storage settings. For Owncloud/Nexcloud/Unishare there are some extra steps to get the right WebDAV settings. + +1. Go to the Files Application +2. Click on the Settings icon +3. Copy the url value at the field WebDAV. 
Make sure you copy the full path + +- **location**: https://unishare.nl/remote.php/dav/files/ +- **username**: Your own username shown at step 6 +- **password**: The saved Application token at step 6 + +----- +iRODS +----- + +In order to use iRODS as a storage, you need to enter some credential values in the the data storage form. + +Creating access token +===================== +There is no need for creating separate access tokens. As iRODS does not support multiple login tokens per user, we need to use the same credentials as the user is using when reading data from the iRODS server. + +Keep in mind, that when you change the iRODS credentials (due to missing password), you need to update the credentials here as well. + +As iRODS works with federation, you need to know your iRODS zone. This will be added to the address location when entering iRODS credentials. + +VRE Storage values +================== + +- **location**: hostname or ip of the iRODS server with '#\[iRODS ZONE NAME\]' appended +- **username**: Your own username +- **password**: Your own password \ No newline at end of file diff --git a/doc/swagger.yaml b/doc/swagger.yaml new file mode 100644 index 0000000..9e21d04 --- /dev/null +++ b/doc/swagger.yaml @@ -0,0 +1,2619 @@ +swagger: '2.0' +info: + title: Virtual Research Environment API + description: Here you can see a list of API endpoints and actions that are available + to communicate with the VRE API + termsOfService: https://www.rug.nl + contact: + email: vre_team@rug.nl + license: + name: MIT License + version: v1 +host: localhost:8080 +schemes: + - http +basePath: /api +consumes: + - application/json +produces: + - application/json +securityDefinitions: + Hawk: + type: apiKey + description: "HTTP Holder-Of-Key Authentication Scheme, https://github.com/hapijs/hawk,\ + \ https://hawkrest.readthedocs.io/en/latest/
Ex header:'Authorization': 'Hawk mac=\"F4+S9cu7yZiZEgdtqzMpOOdudvqcV2V2Yzk2WcphECc=\"\ + , hash=\"+7fKUX+djeQolvnLTxr0X47e//UHKbkRlajwMw3tx3w=\", id=\"7FI5JET4\", ts=\"\ + 1592905433\", nonce=\"DlV-fL\"'" + name: Authorization + in: header +security: + - Hawk: [] +paths: + /auth/login/: + post: + operationId: auth_login_create + description: This will let you login to the REST API. Login with your username + and password, and get a key and a secret back that is used for further communication + using HAWK signing of requests (https://github.com/hapijs/hawk). + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/TokenLogin' + responses: + '200': + description: response description + schema: + $ref: '#/definitions/Token' + tags: + - auth + security: [] + parameters: [] + /auth/users/: + get: + operationId: auth_users_list + description: '' + parameters: + - name: page + in: query + description: A page number within the paginated result set. + required: false + type: integer + responses: + '200': + description: '' + schema: + required: + - count + - results + type: object + properties: + count: + type: integer + next: + type: string + format: uri + x-nullable: true + previous: + type: string + format: uri + x-nullable: true + results: + type: array + items: + $ref: '#/definitions/User' + tags: + - auth + post: + operationId: auth_users_create + description: '' + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/UserCreate' + responses: + '201': + description: '' + schema: + $ref: '#/definitions/UserCreate' + tags: + - auth + parameters: [] + /auth/users/activation/: + post: + operationId: auth_users_activation + description: '' + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/Activation' + responses: + '201': + description: '' + schema: + $ref: '#/definitions/Activation' + tags: + - auth + parameters: [] + /auth/users/me/: + get: + operationId: 
auth_users_me_read + description: '' + parameters: + - name: page + in: query + description: A page number within the paginated result set. + required: false + type: integer + responses: + '200': + description: '' + schema: + required: + - count + - results + type: object + properties: + count: + type: integer + next: + type: string + format: uri + x-nullable: true + previous: + type: string + format: uri + x-nullable: true + results: + type: array + items: + $ref: '#/definitions/User' + tags: + - auth + put: + operationId: auth_users_me_update + description: '' + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/User' + responses: + '200': + description: '' + schema: + $ref: '#/definitions/User' + tags: + - auth + patch: + operationId: auth_users_me_partial_update + description: '' + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/User' + responses: + '200': + description: '' + schema: + $ref: '#/definitions/User' + tags: + - auth + delete: + operationId: auth_users_me_delete + description: '' + parameters: [] + responses: + '204': + description: '' + tags: + - auth + parameters: [] + /auth/users/resend_activation/: + post: + operationId: auth_users_resend_activation + description: '' + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/SendEmailReset' + responses: + '201': + description: '' + schema: + $ref: '#/definitions/SendEmailReset' + tags: + - auth + parameters: [] + /auth/users/reset_password/: + post: + operationId: auth_users_reset_password + description: '' + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/SendEmailReset' + responses: + '201': + description: '' + schema: + $ref: '#/definitions/SendEmailReset' + tags: + - auth + parameters: [] + /auth/users/reset_password_confirm/: + post: + operationId: auth_users_reset_password_confirm + description: '' + parameters: + - name: data + in: body + 
required: true + schema: + $ref: '#/definitions/PasswordResetConfirm' + responses: + '201': + description: '' + schema: + $ref: '#/definitions/PasswordResetConfirm' + tags: + - auth + parameters: [] + /auth/users/reset_username/: + post: + operationId: auth_users_reset_username + description: '' + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/SendEmailReset' + responses: + '201': + description: '' + schema: + $ref: '#/definitions/SendEmailReset' + tags: + - auth + parameters: [] + /auth/users/reset_username_confirm/: + post: + operationId: auth_users_reset_username_confirm + description: '' + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/UsernameResetConfirm' + responses: + '201': + description: '' + schema: + $ref: '#/definitions/UsernameResetConfirm' + tags: + - auth + parameters: [] + /auth/users/set_password/: + post: + operationId: auth_users_set_password + description: '' + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/SetPassword' + responses: + '201': + description: '' + schema: + $ref: '#/definitions/SetPassword' + tags: + - auth + parameters: [] + /auth/users/set_username/: + post: + operationId: auth_users_set_username + description: '' + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/SetUsername' + responses: + '201': + description: '' + schema: + $ref: '#/definitions/SetUsername' + tags: + - auth + parameters: [] + /auth/users/{id}/: + get: + operationId: auth_users_read + description: '' + parameters: [] + responses: + '200': + description: '' + schema: + $ref: '#/definitions/User' + tags: + - auth + put: + operationId: auth_users_update + description: '' + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/User' + responses: + '200': + description: '' + schema: + $ref: '#/definitions/User' + tags: + - auth + patch: + operationId: 
auth_users_partial_update + description: '' + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/User' + responses: + '200': + description: '' + schema: + $ref: '#/definitions/User' + tags: + - auth + delete: + operationId: auth_users_delete + description: '' + parameters: [] + responses: + '204': + description: '' + tags: + - auth + parameters: + - name: id + in: path + description: A unique integer value identifying this user. + required: true + type: integer + /v1/dropoffs/: + get: + operationId: v1_dropoffs_list + description: API endpoint for listing data drops. This is a readonly endpoint. + parameters: + - name: page + in: query + description: A page number within the paginated result set. + required: false + type: integer + responses: + '200': + description: '' + schema: + required: + - count + - results + type: object + properties: + count: + type: integer + next: + type: string + format: uri + x-nullable: true + previous: + type: string + format: uri + x-nullable: true + results: + type: array + items: + $ref: '#/definitions/DataDrop' + tags: + - v1 + parameters: [] + /v1/dropoffs/webhook/: + post: + operationId: v1_dropoffs_webhook + description: Special webhook url for TUSD communication + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/DataDrop' + responses: + '201': + description: '' + schema: + $ref: '#/definitions/DataDrop' + tags: + - v1 + parameters: [] + /v1/dropoffs/{id}/: + get: + operationId: v1_dropoffs_read + description: API endpoint for listing data drops. This is a readonly endpoint. + parameters: [] + responses: + '200': + description: '' + schema: + $ref: '#/definitions/DataDrop' + tags: + - v1 + parameters: + - name: id + in: path + description: A unique integer value identifying this datadrop. 
+ required: true + type: integer + /v1/invitations/: + get: + operationId: v1_invitations_list + description: API endpoint for creating/reading/updating/deleting and sending + invitations. + parameters: + - name: page + in: query + description: A page number within the paginated result set. + required: false + type: integer + responses: + '200': + description: '' + schema: + required: + - count + - results + type: object + properties: + count: + type: integer + next: + type: string + format: uri + x-nullable: true + previous: + type: string + format: uri + x-nullable: true + results: + type: array + items: + $ref: '#/definitions/Invitation' + tags: + - v1 + post: + operationId: v1_invitations_create + description: API endpoint for creating/reading/updating/deleting and sending + invitations. + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/Invitation' + responses: + '201': + description: '' + schema: + $ref: '#/definitions/Invitation' + tags: + - v1 + parameters: [] + /v1/invitations/{id}/: + get: + operationId: v1_invitations_read + description: API endpoint for creating/reading/updating/deleting and sending + invitations. + parameters: [] + responses: + '200': + description: '' + schema: + $ref: '#/definitions/Invitation' + tags: + - v1 + put: + operationId: v1_invitations_update + description: API endpoint for creating/reading/updating/deleting and sending + invitations. + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/Invitation' + responses: + '200': + description: '' + schema: + $ref: '#/definitions/Invitation' + tags: + - v1 + patch: + operationId: v1_invitations_partial_update + description: API endpoint for creating/reading/updating/deleting and sending + invitations. 
+ parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/Invitation' + responses: + '200': + description: '' + schema: + $ref: '#/definitions/Invitation' + tags: + - v1 + delete: + operationId: v1_invitations_delete + description: API endpoint for creating/reading/updating/deleting and sending + invitations. + parameters: [] + responses: + '204': + description: '' + tags: + - v1 + parameters: + - name: id + in: path + description: A unique integer value identifying this invitation. + required: true + type: integer + /v1/invitations/{id}/send_email/: + put: + operationId: v1_invitations_send_email + description: Resend the invitation email + parameters: + - name: data + in: body + required: true + schema: + type: object + properties: {} + responses: + '200': + description: '' + schema: + type: object + properties: {} + tags: + - v1 + parameters: + - name: id + in: path + description: A unique integer value identifying this invitation. + required: true + type: integer + /v1/researchers/: + get: + operationId: v1_researchers_list + description: API endpoint for listing researchers. This is a readonly endpoint + ordered by username. + parameters: + - name: page + in: query + description: A page number within the paginated result set. + required: false + type: integer + responses: + '200': + description: '' + schema: + required: + - count + - results + type: object + properties: + count: + type: integer + next: + type: string + format: uri + x-nullable: true + previous: + type: string + format: uri + x-nullable: true + results: + type: array + items: + $ref: '#/definitions/Researcher' + tags: + - v1 + post: + operationId: v1_researchers_create + description: API endpoint for listing researchers. This is a readonly endpoint + ordered by username. 
+ parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/Researcher' + responses: + '201': + description: '' + schema: + $ref: '#/definitions/Researcher' + tags: + - v1 + parameters: [] + /v1/researchers/{id}/: + get: + operationId: v1_researchers_read + description: API endpoint for listing researchers. This is a readonly endpoint + ordered by username. + parameters: [] + responses: + '200': + description: '' + schema: + $ref: '#/definitions/Researcher' + tags: + - v1 + put: + operationId: v1_researchers_update + description: API endpoint for listing researchers. This is a readonly endpoint + ordered by username. + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/Researcher' + responses: + '200': + description: '' + schema: + $ref: '#/definitions/Researcher' + tags: + - v1 + patch: + operationId: v1_researchers_partial_update + description: API endpoint for listing researchers. This is a readonly endpoint + ordered by username. + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/Researcher' + responses: + '200': + description: '' + schema: + $ref: '#/definitions/Researcher' + tags: + - v1 + delete: + operationId: v1_researchers_delete + description: API endpoint for listing researchers. This is a readonly endpoint + ordered by username. + parameters: [] + responses: + '204': + description: '' + tags: + - v1 + parameters: + - name: id + in: path + description: A unique integer value identifying this researcher. + required: true + type: integer + /v1/storageengines/: + get: + operationId: v1_storageengines_list + description: API endpoint for creating/reading/updating/deleting storages. + parameters: + - name: page + in: query + description: A page number within the paginated result set. 
+ required: false + type: integer + responses: + '200': + description: '' + schema: + required: + - count + - results + type: object + properties: + count: + type: integer + next: + type: string + format: uri + x-nullable: true + previous: + type: string + format: uri + x-nullable: true + results: + type: array + items: + $ref: '#/definitions/StorageEngine' + tags: + - v1 + post: + operationId: v1_storageengines_create + description: API endpoint for creating/reading/updating/deleting storages. + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/StorageEngine' + responses: + '201': + description: '' + schema: + $ref: '#/definitions/StorageEngine' + tags: + - v1 + parameters: [] + /v1/storageengines/{id}/: + get: + operationId: v1_storageengines_read + description: API endpoint for creating/reading/updating/deleting storages. + parameters: [] + responses: + '200': + description: '' + schema: + $ref: '#/definitions/StorageEngine' + tags: + - v1 + put: + operationId: v1_storageengines_update + description: API endpoint for creating/reading/updating/deleting storages. + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/StorageEngine' + responses: + '200': + description: '' + schema: + $ref: '#/definitions/StorageEngine' + tags: + - v1 + patch: + operationId: v1_storageengines_partial_update + description: API endpoint for creating/reading/updating/deleting storages. + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/StorageEngine' + responses: + '200': + description: '' + schema: + $ref: '#/definitions/StorageEngine' + tags: + - v1 + delete: + operationId: v1_storageengines_delete + description: API endpoint for creating/reading/updating/deleting storages. + parameters: [] + responses: + '204': + description: '' + tags: + - v1 + parameters: + - name: id + in: path + description: A unique integer value identifying this storage engine. 
+ required: true + type: integer + /v1/storagelocations/: + get: + operationId: v1_storagelocations_list + description: API endpoint for creating/reading/updating/deleting storages. + parameters: + - name: page + in: query + description: A page number within the paginated result set. + required: false + type: integer + responses: + '200': + description: '' + schema: + required: + - count + - results + type: object + properties: + count: + type: integer + next: + type: string + format: uri + x-nullable: true + previous: + type: string + format: uri + x-nullable: true + results: + type: array + items: + $ref: '#/definitions/StorageLocation' + tags: + - v1 + post: + operationId: v1_storagelocations_create + description: API endpoint for creating/reading/updating/deleting storages. + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/StorageLocation' + responses: + '201': + description: '' + schema: + $ref: '#/definitions/StorageLocation' + tags: + - v1 + parameters: [] + /v1/storagelocations/{id}/: + get: + operationId: v1_storagelocations_read + description: API endpoint for creating/reading/updating/deleting storages. + parameters: [] + responses: + '200': + description: '' + schema: + $ref: '#/definitions/StorageLocation' + tags: + - v1 + put: + operationId: v1_storagelocations_update + description: API endpoint for creating/reading/updating/deleting storages. + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/StorageLocation' + responses: + '200': + description: '' + schema: + $ref: '#/definitions/StorageLocation' + tags: + - v1 + patch: + operationId: v1_storagelocations_partial_update + description: API endpoint for creating/reading/updating/deleting storages. 
+ parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/StorageLocation' + responses: + '200': + description: '' + schema: + $ref: '#/definitions/StorageLocation' + tags: + - v1 + delete: + operationId: v1_storagelocations_delete + description: API endpoint for creating/reading/updating/deleting storages. + parameters: [] + responses: + '204': + description: '' + tags: + - v1 + parameters: + - name: id + in: path + description: A unique integer value identifying this storage location. + required: true + type: integer + /v1/studies/: + get: + operationId: v1_studies_list + description: API endpoint for creating/reading/updating/deleting studies. + parameters: + - name: page + in: query + description: A page number within the paginated result set. + required: false + type: integer + responses: + '200': + description: '' + schema: + required: + - count + - results + type: object + properties: + count: + type: integer + next: + type: string + format: uri + x-nullable: true + previous: + type: string + format: uri + x-nullable: true + results: + type: array + items: + $ref: '#/definitions/Study' + tags: + - v1 + post: + operationId: v1_studies_create + description: API endpoint for creating/reading/updating/deleting studies. + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/Study' + responses: + '201': + description: '' + schema: + $ref: '#/definitions/Study' + tags: + - v1 + parameters: [] + /v1/studies/{id}/: + get: + operationId: v1_studies_read + description: API endpoint for creating/reading/updating/deleting studies. + parameters: [] + responses: + '200': + description: '' + schema: + $ref: '#/definitions/Study' + tags: + - v1 + put: + operationId: v1_studies_update + description: API endpoint for creating/reading/updating/deleting studies. 
+ parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/Study' + responses: + '200': + description: '' + schema: + $ref: '#/definitions/Study' + tags: + - v1 + patch: + operationId: v1_studies_partial_update + description: API endpoint for creating/reading/updating/deleting studies. + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/Study' + responses: + '200': + description: '' + schema: + $ref: '#/definitions/Study' + tags: + - v1 + delete: + operationId: v1_studies_delete + description: API endpoint for creating/reading/updating/deleting studies. + parameters: [] + responses: + '204': + description: '' + tags: + - v1 + parameters: + - name: id + in: path + description: A unique integer value identifying this studie. + required: true + type: integer + /v1/studies/{id}/upload/: + get: + operationId: v1_studies_upload + description: API endpoint for creating/reading/updating/deleting studies. + parameters: [] + responses: + '200': + description: '' + schema: + $ref: '#/definitions/Study' + tags: + - v1 + parameters: + - name: id + in: path + description: A unique integer value identifying this studie. + required: true + type: integer + /v1/virtualmachines/: + get: + operationId: v1_virtualmachines_list + description: API endpoint for creating/reading/updating/deleting virtual machines. + parameters: + - name: page + in: query + description: A page number within the paginated result set. + required: false + type: integer + responses: + '200': + description: '' + schema: + required: + - count + - results + type: object + properties: + count: + type: integer + next: + type: string + format: uri + x-nullable: true + previous: + type: string + format: uri + x-nullable: true + results: + type: array + items: + $ref: '#/definitions/VirtualMachine' + tags: + - v1 + post: + operationId: v1_virtualmachines_create + description: API endpoint for creating/reading/updating/deleting virtual machines. 
+ parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/VirtualMachine' + responses: + '201': + description: '' + schema: + $ref: '#/definitions/VirtualMachine' + tags: + - v1 + parameters: [] + /v1/virtualmachines/access/: + get: + operationId: v1_virtualmachines_access_list + description: API endpoint for listing virtual machines logins. This is a readonly + endpoint. + parameters: + - name: page + in: query + description: A page number within the paginated result set. + required: false + type: integer + responses: + '200': + description: '' + schema: + required: + - count + - results + type: object + properties: + count: + type: integer + next: + type: string + format: uri + x-nullable: true + previous: + type: string + format: uri + x-nullable: true + results: + type: array + items: + $ref: '#/definitions/VirtualMachineAccess' + tags: + - v1 + parameters: [] + /v1/virtualmachines/access/{id}/: + get: + operationId: v1_virtualmachines_access_read + description: API endpoint for listing virtual machines logins. This is a readonly + endpoint. + parameters: [] + responses: + '200': + description: '' + schema: + $ref: '#/definitions/VirtualMachineAccess' + tags: + - v1 + parameters: + - name: id + in: path + description: A unique integer value identifying this virtual machine login. + required: true + type: integer + /v1/virtualmachines/gpu/: + get: + operationId: v1_virtualmachines_gpu_list + description: API endpoint for listing virtual machines available GPU types. + This is a readonly endpoint. + parameters: + - name: page + in: query + description: A page number within the paginated result set. 
+ required: false + type: integer + responses: + '200': + description: '' + schema: + required: + - count + - results + type: object + properties: + count: + type: integer + next: + type: string + format: uri + x-nullable: true + previous: + type: string + format: uri + x-nullable: true + results: + type: array + items: + $ref: '#/definitions/VirtualMachineGPU' + tags: + - v1 + parameters: [] + /v1/virtualmachines/gpu/{id}/: + get: + operationId: v1_virtualmachines_gpu_read + description: API endpoint for listing virtual machines available GPU types. + This is a readonly endpoint. + parameters: [] + responses: + '200': + description: '' + schema: + $ref: '#/definitions/VirtualMachineGPU' + tags: + - v1 + parameters: + - name: id + in: path + description: A unique integer value identifying this virtual machine GPU. + required: true + type: integer + /v1/virtualmachines/memory/: + get: + operationId: v1_virtualmachines_memory_list + description: API endpoint for listing virtual machines available memory types. + This is a readonly endpoint. + parameters: + - name: page + in: query + description: A page number within the paginated result set. + required: false + type: integer + responses: + '200': + description: '' + schema: + required: + - count + - results + type: object + properties: + count: + type: integer + next: + type: string + format: uri + x-nullable: true + previous: + type: string + format: uri + x-nullable: true + results: + type: array + items: + $ref: '#/definitions/VirtualMachineMemory' + tags: + - v1 + parameters: [] + /v1/virtualmachines/memory/{id}/: + get: + operationId: v1_virtualmachines_memory_read + description: API endpoint for listing virtual machines available memory types. + This is a readonly endpoint. 
+ parameters: [] + responses: + '200': + description: '' + schema: + $ref: '#/definitions/VirtualMachineMemory' + tags: + - v1 + parameters: + - name: id + in: path + description: A unique integer value identifying this virtual machine memory. + required: true + type: integer + /v1/virtualmachines/network/: + get: + operationId: v1_virtualmachines_network_list + description: API endpoint for listing virtual machines available network types. + This is a readonly endpoint. + parameters: + - name: page + in: query + description: A page number within the paginated result set. + required: false + type: integer + responses: + '200': + description: '' + schema: + required: + - count + - results + type: object + properties: + count: + type: integer + next: + type: string + format: uri + x-nullable: true + previous: + type: string + format: uri + x-nullable: true + results: + type: array + items: + $ref: '#/definitions/VirtualMachineNetwork' + tags: + - v1 + parameters: [] + /v1/virtualmachines/network/{id}/: + get: + operationId: v1_virtualmachines_network_read + description: API endpoint for listing virtual machines available network types. + This is a readonly endpoint. + parameters: [] + responses: + '200': + description: '' + schema: + $ref: '#/definitions/VirtualMachineNetwork' + tags: + - v1 + parameters: + - name: id + in: path + description: A unique integer value identifying this virtual machine network. + required: true + type: integer + /v1/virtualmachines/os/: + get: + operationId: v1_virtualmachines_os_list + description: API endpoint for listing virtual machines available operating systems. + This is a readonly endpoint. + parameters: + - name: page + in: query + description: A page number within the paginated result set. 
+ required: false + type: integer + responses: + '200': + description: '' + schema: + required: + - count + - results + type: object + properties: + count: + type: integer + next: + type: string + format: uri + x-nullable: true + previous: + type: string + format: uri + x-nullable: true + results: + type: array + items: + $ref: '#/definitions/VirtualMachineOperatingSystem' + tags: + - v1 + parameters: [] + /v1/virtualmachines/os/{id}/: + get: + operationId: v1_virtualmachines_os_read + description: API endpoint for listing virtual machines available operating systems. + This is a readonly endpoint. + parameters: [] + responses: + '200': + description: '' + schema: + $ref: '#/definitions/VirtualMachineOperatingSystem' + tags: + - v1 + parameters: + - name: id + in: path + description: A unique integer value identifying this virtual machine operating + system. + required: true + type: integer + /v1/virtualmachines/profiles/: + get: + operationId: v1_virtualmachines_profiles_list + description: API endpoint for listing virtual machines available profiles. This + is a readonly endpoint. + parameters: + - name: page + in: query + description: A page number within the paginated result set. + required: false + type: integer + responses: + '200': + description: '' + schema: + required: + - count + - results + type: object + properties: + count: + type: integer + next: + type: string + format: uri + x-nullable: true + previous: + type: string + format: uri + x-nullable: true + results: + type: array + items: + $ref: '#/definitions/VirtualMachineProfile' + tags: + - v1 + parameters: [] + /v1/virtualmachines/profiles/{id}/: + get: + operationId: v1_virtualmachines_profiles_read + description: API endpoint for listing virtual machines available profiles. This + is a readonly endpoint. 
+ parameters: [] + responses: + '200': + description: '' + schema: + $ref: '#/definitions/VirtualMachineProfile' + tags: + - v1 + parameters: + - name: id + in: path + description: A unique integer value identifying this virtual machine profile. + required: true + type: integer + /v1/virtualmachines/storage/: + get: + operationId: v1_virtualmachines_storage_list + description: API endpoint for listing virtual machines available storage types. + This is a readonly endpoint. + parameters: + - name: page + in: query + description: A page number within the paginated result set. + required: false + type: integer + responses: + '200': + description: '' + schema: + required: + - count + - results + type: object + properties: + count: + type: integer + next: + type: string + format: uri + x-nullable: true + previous: + type: string + format: uri + x-nullable: true + results: + type: array + items: + $ref: '#/definitions/VirtualMachineStorage' + tags: + - v1 + parameters: [] + /v1/virtualmachines/storage/{id}/: + get: + operationId: v1_virtualmachines_storage_read + description: API endpoint for listing virtual machines available storage types. + This is a readonly endpoint. + parameters: [] + responses: + '200': + description: '' + schema: + $ref: '#/definitions/VirtualMachineStorage' + tags: + - v1 + parameters: + - name: id + in: path + description: A unique integer value identifying this virtual machine storage. + required: true + type: integer + /v1/virtualmachines/{id}/: + get: + operationId: v1_virtualmachines_read + description: API endpoint for creating/reading/updating/deleting virtual machines. + parameters: [] + responses: + '200': + description: '' + schema: + $ref: '#/definitions/VirtualMachine' + tags: + - v1 + put: + operationId: v1_virtualmachines_update + description: API endpoint for creating/reading/updating/deleting virtual machines. 
+ parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/VirtualMachine' + responses: + '200': + description: '' + schema: + $ref: '#/definitions/VirtualMachine' + tags: + - v1 + patch: + operationId: v1_virtualmachines_partial_update + description: API endpoint for creating/reading/updating/deleting virtual machines. + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/VirtualMachine' + responses: + '200': + description: '' + schema: + $ref: '#/definitions/VirtualMachine' + tags: + - v1 + delete: + operationId: v1_virtualmachines_delete + description: API endpoint for creating/reading/updating/deleting virtual machines. + parameters: [] + responses: + '204': + description: '' + tags: + - v1 + parameters: + - name: id + in: path + description: A unique integer value identifying this virtual machine. + required: true + type: integer +definitions: + TokenLogin: + required: + - username + - password + type: object + properties: + username: + title: Username + description: Your username to login + type: string + maxLength: 200 + minLength: 1 + password: + title: Password + description: Your password to login + type: string + maxLength: 200 + minLength: 1 + Token: + type: object + properties: + key: + title: Key + description: The key for this token. This is used for Hawk verification. + type: string + maxLength: 16 + minLength: 1 + secret: + title: Secret + description: The secret for this token. This is used for Hawk signing. + type: string + maxLength: 64 + minLength: 1 + User: + type: object + properties: + email: + title: Email address + type: string + format: email + maxLength: 254 + id: + title: ID + type: integer + readOnly: true + username: + title: Username + description: Required. 150 characters or fewer. Letters, digits and @/./+/-/_ + only. 
+ type: string + readOnly: true + minLength: 1 + UserCreate: + required: + - username + - password + type: object + properties: + email: + title: Email address + type: string + format: email + maxLength: 254 + username: + title: Username + description: Required. 150 characters or fewer. Letters, digits and @/./+/-/_ + only. + type: string + pattern: ^[\w.@+-]+$ + maxLength: 150 + minLength: 1 + id: + title: ID + type: integer + readOnly: true + password: + title: Password + type: string + minLength: 1 + Activation: + required: + - uid + - token + type: object + properties: + uid: + title: Uid + type: string + minLength: 1 + token: + title: Token + type: string + minLength: 1 + SendEmailReset: + required: + - email + type: object + properties: + email: + title: Email + type: string + format: email + minLength: 1 + PasswordResetConfirm: + required: + - uid + - token + - new_password + type: object + properties: + uid: + title: Uid + type: string + minLength: 1 + token: + title: Token + type: string + minLength: 1 + new_password: + title: New password + type: string + minLength: 1 + UsernameResetConfirm: + required: + - new_username + type: object + properties: + new_username: + title: Username + description: Required. 150 characters or fewer. Letters, digits and @/./+/-/_ + only. + type: string + pattern: ^[\w.@+-]+$ + maxLength: 150 + minLength: 1 + SetPassword: + required: + - new_password + - current_password + type: object + properties: + new_password: + title: New password + type: string + minLength: 1 + current_password: + title: Current password + type: string + minLength: 1 + SetUsername: + required: + - current_password + - new_username + type: object + properties: + current_password: + title: Current password + type: string + minLength: 1 + new_username: + title: Username + description: Required. 150 characters or fewer. Letters, digits and @/./+/-/_ + only. 
+ type: string + pattern: ^[\w.@+-]+$ + maxLength: 150 + minLength: 1 + Invitation: + required: + - name + - email + - study + type: object + properties: + url: + title: Url + type: string + format: uri + readOnly: true + id: + title: Id + type: string + readOnly: true + researcher: + title: Researcher + type: string + readOnly: true + created_at: + title: Date created + description: The date and time this model has been created + type: string + format: date-time + readOnly: true + updated_at: + title: Date updated + description: The date and time this model has been updated + type: string + format: date-time + readOnly: true + name: + title: Name + description: The name of the uploader / data provider. + type: string + maxLength: 200 + minLength: 1 + email: + title: Email address + description: The email address of the uploader / data provider. + type: string + format: email + maxLength: 254 + minLength: 1 + upload_code: + title: Upload code + description: A unique upload code. Will be generated when a new study is saved. + type: string + maxLength: 20 + minLength: 1 + mail_sent: + title: Date mailed + description: The date when the last invitation is send. + type: string + format: date-time + x-nullable: true + study: + title: Studie + description: The study where this invitation belongs to. 
+ type: string + format: uri + DataDrop: + required: + - original_filename + - upload_filename + - study + type: object + properties: + url: + title: Url + type: string + format: uri + readOnly: true + id: + title: Id + type: string + readOnly: true + researcher: + title: Researcher + type: string + readOnly: true + uploader: + $ref: '#/definitions/Invitation' + created_at: + title: Date created + description: The date and time this model has been created + type: string + format: date-time + readOnly: true + updated_at: + title: Date updated + description: The date and time this model has been updated + type: string + format: date-time + readOnly: true + original_filename: + title: Original filename + description: The original filename as the uploader has uploaded it. + type: string + maxLength: 1024 + minLength: 1 + upload_filename: + title: Uploaded unique filename + description: A unique filename that is used to store on disc. + type: string + maxLength: 1024 + minLength: 1 + filesize: + title: Filesize + description: Filesize of the upload. + type: integer + readOnly: true + ip: + title: IP address + description: Ip address of the uploader. + type: string + readOnly: true + minLength: 1 + encrypted: + title: Encrypted + description: Is the file encrypted during upload. + type: boolean + readOnly: true + status: + title: Status + description: Datadrop status. + type: string + enum: + - NEW + - UPLOADING + - UPLOADED + - MOVING + - DONE + - ERROR + status_message: + title: Status message + description: Datadrop status message. + type: string + x-nullable: true + study: + title: Studie + description: The study where this data drop belongs to. 
+ type: string + format: uri + Researcher: + required: + - first_name + - last_name + - email_address + type: object + properties: + url: + title: Url + type: string + format: uri + readOnly: true + id: + title: Id + type: string + readOnly: true + researcher: + title: Researcher + type: string + readOnly: true + first_name: + title: First name + type: string + minLength: 1 + last_name: + title: Last name + type: string + minLength: 1 + email_address: + title: Email address + type: string + minLength: 1 + token_key: + title: Token key + type: string + readOnly: true + token_secret: + title: Token secret + type: string + readOnly: true + created_at: + title: Date created + description: The date and time this model has been created + type: string + format: date-time + readOnly: true + updated_at: + title: Date updated + description: The date and time this model has been updated + type: string + format: date-time + readOnly: true + StorageEngine: + required: + - name + - engine + - location + - username + - password + type: object + properties: + url: + title: Url + type: string + format: uri + readOnly: true + id: + title: Id + type: string + readOnly: true + researcher: + title: Researcher + type: string + readOnly: true + created_at: + title: Date created + description: The date and time this model has been created + type: string + format: date-time + readOnly: true + updated_at: + title: Date updated + description: The date and time this model has been updated + type: string + format: date-time + readOnly: true + name: + title: Name + description: Easy to remember name for this storage. + type: string + maxLength: 50 + minLength: 1 + engine: + title: Engine + description: The engine storage type. + type: string + enum: + - WEBDAV + - GITEA + - GITHUB + - IRODS + location: + title: Location + description: Full location/url where to store/get the data. 
+ type: string + maxLength: 1024 + minLength: 1 + username: + title: Username + description: The user name to connect to the storage. + type: string + maxLength: 100 + minLength: 1 + password: + title: Password + description: The password for the user name to connect to the storage. + type: string + maxLength: 100 + minLength: 1 + StorageLocation: + required: + - study + - storageengine + type: object + properties: + url: + title: Url + type: string + format: uri + readOnly: true + id: + title: Id + type: string + readOnly: true + researcher: + title: Researcher + type: string + readOnly: true + encrypted: + title: Encrypted + type: string + readOnly: true + name: + title: Name + type: string + readOnly: true + created_at: + title: Date created + description: The date and time this model has been created + type: string + format: date-time + readOnly: true + updated_at: + title: Date updated + description: The date and time this model has been updated + type: string + format: date-time + readOnly: true + direction: + title: Direction + description: Is it a 'source' or 'destination'. + type: string + enum: + - DESTINATION + - SOURCE + path: + title: Path + description: Folder to store the data. Will be created when does not exists. + type: string + maxLength: 1024 + encryption_password: + title: Encryption password + description: Password for encryption the uploaded data. Leave empty to disable + encryption. 
+ type: string + maxLength: 100 + study: + title: Studie + description: Select the study where you need this storage location + type: string + format: uri + storageengine: + title: Storage engine + description: Select the storage engine where to store the data + type: string + format: uri + Study: + required: + - name + type: object + properties: + url: + title: Url + type: string + format: uri + readOnly: true + id: + title: Id + type: string + readOnly: true + researcher: + title: Researcher + type: string + readOnly: true + api_upload_url: + title: Api upload url + type: string + readOnly: true + web_upload_url: + title: Web upload url + type: string + readOnly: true + total_files: + title: Total files + type: string + readOnly: true + total_file_size: + title: Total file size + type: string + readOnly: true + total_invitations: + title: Total invitations + type: string + readOnly: true + storage: + $ref: '#/definitions/StorageLocation' + invitations: + type: array + items: + $ref: '#/definitions/Invitation' + readOnly: true + files: + type: array + items: + $ref: '#/definitions/DataDrop' + readOnly: true + created_at: + title: Date created + description: The date and time this model has been created + type: string + format: date-time + readOnly: true + updated_at: + title: Date updated + description: The date and time this model has been updated + type: string + format: date-time + readOnly: true + name: + title: Name + description: Name of the research study. + type: string + maxLength: 200 + minLength: 1 + description: + title: Description + description: Enter a short description for this study. + type: string + x-nullable: true + upload_code: + title: Upload code + description: A unique upload code. Will be generated when a new study is saved. + type: string + readOnly: true + minLength: 1 + upload_uuid: + title: Upload url key + description: A unique upload url. Will be generated when a new study is saved. 
+ type: string + format: uuid + readOnly: true + VirtualMachineAccess: + required: + - login_key + - password + - virtual_machine_ip + - virtual_machine + type: object + properties: + url: + title: Url + type: string + format: uri + readOnly: true + id: + title: Id + type: string + readOnly: true + researcher: + title: Researcher + type: string + readOnly: true + user: + title: User + type: string + readOnly: true + created_at: + title: Date created + description: The date and time this model has been created + type: string + format: date-time + readOnly: true + updated_at: + title: Date updated + description: The date and time this model has been updated + type: string + format: date-time + readOnly: true + login_key: + title: Login key + description: The private key to login to the virtual machine. + type: string + maxLength: 2048 + minLength: 1 + password: + title: Password + description: The SSH password to login. + type: string + maxLength: 46 + minLength: 1 + virtual_machine_ip: + title: Login IP + description: The IP address to login to the virtual machine. + type: string + maxLength: 46 + minLength: 1 + virtual_machine: + title: Virtual machine + description: The virtual machine to login to. 
+ type: string + format: uri + VirtualMachine: + required: + - name + - study + - profile + - operating_system + - base_memory_type + - base_storage_type + - networks + type: object + properties: + url: + title: Url + type: string + format: uri + readOnly: true + id: + title: Id + type: string + readOnly: true + researcher: + title: Researcher + type: string + readOnly: true + remote_id: + title: Remote id + type: string + readOnly: true + profile_name: + title: Profile name + type: string + readOnly: true + operating_system_name: + title: Operating system name + type: string + readOnly: true + total_memory: + title: Total memory + type: string + readOnly: true + total_storage: + title: Total storage + type: string + readOnly: true + access: + type: array + items: + $ref: '#/definitions/VirtualMachineAccess' + readOnly: true + created_at: + title: Date created + description: The date and time this model has been created + type: string + format: date-time + readOnly: true + updated_at: + title: Date updated + description: The date and time this model has been updated + type: string + format: date-time + readOnly: true + name: + title: Name + description: Easy to remember name for this virtual machine. + type: string + maxLength: 50 + minLength: 1 + base_memory_amount: + title: Base memory amount + description: Amount of memory. Default is 1 + type: integer + base_storage_amount: + title: Base storage amount + description: Amount of disk storage. Default is 1 + type: integer + additional_gpu_amount: + title: Additional gpu amount + description: Amount of GPUs. Default is 0 + type: integer + additional_memory_amount: + title: Additional memory amount + description: Amount of memory. Default is 0 + type: integer + additional_storage_amount: + title: Additional storage amount + description: Amount of storage. Default is 0 + type: integer + study: + title: Study + description: The study for which this virtual machine is used. 
+ type: string + format: uri + profile: + title: Profile + description: The virtual machine selected profile. + type: string + format: uri + operating_system: + title: Operating system + description: The operating system for this virtual machine. + type: string + format: uri + base_memory_type: + title: Base memory type + description: Basic memory + type: string + format: uri + base_storage_type: + title: Base storage type + description: Basic disk size + type: string + format: uri + additional_gpu_type: + title: Additional gpu type + description: Additional GPU + type: string + format: uri + x-nullable: true + additional_memory_type: + title: Additional memory type + description: Additional memory + type: string + format: uri + x-nullable: true + additional_storage_type: + title: Additional storage type + description: Additional storage + type: string + format: uri + x-nullable: true + networks: + description: Networks connected to this virtual machine. + type: array + items: + description: Networks connected to this virtual machine. + type: string + format: uri + uniqueItems: true + VirtualMachineGPU: + required: + - name + type: object + properties: + url: + title: Url + type: string + format: uri + readOnly: true + id: + title: Id + type: string + readOnly: true + researcher: + title: Researcher + type: string + readOnly: true + created_at: + title: Date created + description: The date and time this model has been created + type: string + format: date-time + readOnly: true + updated_at: + title: Date updated + description: The date and time this model has been updated + type: string + format: date-time + readOnly: true + name: + title: Name + description: Easy to remember name for this virtual machine part. 
+ type: string + maxLength: 50 + minLength: 1 + VirtualMachineMemory: + required: + - name + type: object + properties: + url: + title: Url + type: string + format: uri + readOnly: true + id: + title: Id + type: string + readOnly: true + researcher: + title: Researcher + type: string + readOnly: true + unit_value: + title: Unit value + type: string + readOnly: true + created_at: + title: Date created + description: The date and time this model has been created + type: string + format: date-time + readOnly: true + updated_at: + title: Date updated + description: The date and time this model has been updated + type: string + format: date-time + readOnly: true + name: + title: Name + description: Easy to remember name for this virtual machine part. + type: string + maxLength: 50 + minLength: 1 + VirtualMachineNetwork: + required: + - name + - network_type + type: object + properties: + url: + title: Url + type: string + format: uri + readOnly: true + id: + title: Id + type: string + readOnly: true + researcher: + title: Researcher + type: string + readOnly: true + unit_value: + title: Unit value + type: string + readOnly: true + created_at: + title: Date created + description: The date and time this model has been created + type: string + format: date-time + readOnly: true + updated_at: + title: Date updated + description: The date and time this model has been updated + type: string + format: date-time + readOnly: true + name: + title: Name + description: Easy to remember name for this virtual machine part. + type: string + maxLength: 50 + minLength: 1 + network_type: + title: Network type + description: Network type. 
Either private or public + type: string + enum: + - PRIVATE + - PUBLIC + VirtualMachineOperatingSystem: + required: + - name + type: object + properties: + url: + title: Url + type: string + format: uri + readOnly: true + id: + title: Id + type: string + readOnly: true + researcher: + title: Researcher + type: string + readOnly: true + created_at: + title: Date created + description: The date and time this model has been created + type: string + format: date-time + readOnly: true + updated_at: + title: Date updated + description: The date and time this model has been updated + type: string + format: date-time + readOnly: true + name: + title: Name + description: Easy to remember name for this virtual machine part. + type: string + maxLength: 50 + minLength: 1 + VirtualMachineStorage: + required: + - name + type: object + properties: + url: + title: Url + type: string + format: uri + readOnly: true + id: + title: Id + type: string + readOnly: true + researcher: + title: Researcher + type: string + readOnly: true + unit_value: + title: Unit value + type: string + readOnly: true + created_at: + title: Date created + description: The date and time this model has been created + type: string + format: date-time + readOnly: true + updated_at: + title: Date updated + description: The date and time this model has been updated + type: string + format: date-time + readOnly: true + name: + title: Name + description: Easy to remember name for this virtual machine part. 
+ type: string + maxLength: 50 + minLength: 1 + VirtualMachineProfile: + required: + - memory_type + - networks + - storage_type + - gpu_type + - name + type: object + properties: + url: + title: Url + type: string + format: uri + readOnly: true + id: + title: Id + type: string + readOnly: true + researcher: + title: Researcher + type: string + readOnly: true + description: + title: Description + type: string + readOnly: true + memory_type: + $ref: '#/definitions/VirtualMachineMemory' + networks: + type: array + items: + $ref: '#/definitions/VirtualMachineNetwork' + storage_type: + $ref: '#/definitions/VirtualMachineStorage' + gpu_type: + $ref: '#/definitions/VirtualMachineGPU' + created_at: + title: Date created + description: The date and time this model has been created + type: string + format: date-time + readOnly: true + updated_at: + title: Date updated + description: The date and time this model has been updated + type: string + format: date-time + readOnly: true + name: + title: Name + description: Easy to remember name for this virtual machine profile. + type: string + maxLength: 50 + minLength: 1 + memory_amount: + title: Memory amount + description: Amount of memory. Default is 1 + type: integer + storage_amount: + title: Storage amount + description: Amount of disk storage. Default is 1 + type: integer + gpu_amount: + title: Gpu amount + description: Amount of GPUs. Default is 0 + type: integer diff --git a/doc/tus.rst b/doc/tus.rst new file mode 100644 index 0000000..316d3e3 --- /dev/null +++ b/doc/tus.rst @@ -0,0 +1,71 @@ +========= +TUS Hooks +========= + +In order to process the uploads so that we know to which study an upload belongs to, we use web hooks. 
+ +More information about TUSD and hooks can be found here: https://github.com/tus/tusd/blob/master/docs/hooks.md + +In the VRE setup, we have chosen **file-based hooks** (https://github.com/tus/tusd/blob/master/docs/hooks.md#file-hooks) because then we have more flexible power in handling the uploads. And with the file-based hooks we can still make HTTP requests in the hook code. + +A dependency for the web hooks to work is the use of NGINX with LUA. In order to know to which study an upload belongs, NGINX will add extra meta headers to the TUSD server so that the extra study information is available for the hook scripts. + +Settings +======== +In order to communicate with the REST API server, we need some settings to be entered. Create a *.env* file in the TUSD main folder. And fill in at least the three values: + * WEBHOOK_URL + * DROPOFF_API_HAWK_KEY + * DROPOFF_API_HAWK_SECRET + +.. literalinclude:: ../tusd/.env.example + :language: bash + +NGINX / LUA +=========== + +Every file that is uploaded goes through NGINX with a virtual url. During the upload we add a UUID of the study to the upload url. When the client is uploading the data, NGINX will read out the UUID from the url, and add that to the meta data for the TUSD service. And NGINX will strip the url for the TUSD daemon in order to accept the upload. +So basically, NGINX is needed in order to be able to add study information to the TUSD uploads. + + +**NGINX config:** + +.. literalinclude:: ../nginx/tus.vhost.conf + :lines: 54-91 + :language: bash + +**LUA code:** + +.. literalinclude:: ../nginx/lua/dropoff_tus.lua + :language: lua + + +Python hooks +============ + +All the communication between the TUSD service and the REST API is secured with HAWK (https://github.com/mozilla/hawk). Therefore the hooks need to know a key and a secret of a user on the REST API that is allowed to make the webhook calls. Make sure you have created such a user. 
+ +pre-create +---------- + +When a new upload is started, the *pre-create* hook will check if the study ID is valid and that the upload can start. This will also announce the upload to the REST API server and therefore can be found in the overview of data drops. + + +.. literalinclude:: ../tusd/hooks/pre-create.py + + +post-finish +----------- + +When the upload is done, the file needs to be processed and moved to the final destination. This is done with the *post-finish* hook. + +This hook is a bit more complex and does multiple things. + * Check if there is a study ID + * Get the storage information based on the study ID + * Then it encrypts the file if requested + * Upload the file to the destination storages + * Report back if the upload is processed correctly + + +.. literalinclude:: ../tusd/hooks/post-finish.py + :linenos: + diff --git a/docker-compose.yaml b/docker-compose.yaml new file mode 100644 index 0000000..df43a8c --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,83 @@ +version: '3' + +services: + # This is the database server that is used by Django + db: + container_name: postgresdb + image: postgres:latest + restart: always + env_file: + - docker/project.env + # ports: + # - 5432:5432 + volumes: + - postgres-data:/var/lib/postgresql/data + + # This is the redis server for HUEY (background processes) + redis: + container_name: redisdb + image: redis + env_file: + - docker/project.env + # We need the 'bash' command in order to get the environment variables + command: bash -c 'redis-server --requirepass "$${REDIS_PASSWORD}" --appendonly yes' + restart: always + volumes: + - redis-data:/data + + # The Django API REST server. This should only be used for API calls + # TODO: Cleanup / split code better between API, scheduler and portal + broker-api: + container_name: broker_api + env_file: + - docker/project.env + # This feels like bad practice.... 
+ build: + context: ./ + dockerfile: ./docker/Dockerfile.api +# command: gunicorn wsgi:application --bind 0.0.0.0:8000 --workers=4 + # ports: + # - 8800:8000 + depends_on: + - db + - redis + volumes: + - staticfiles:/home/app/web/staticfiles + + # The standaard NGINX server in front of the API. This will filter out all non API calls (/api/) + # And will also server as a static file server for the API documentation + broker-api-ngx: + container_name: broker_api_ngx + env_file: + - docker/project.env + build: + context: ./ + dockerfile: ./docker/Dockerfile.nginx + restart: always + ports: + - 1337:80 + depends_on: + - broker-api + volumes: + - staticfiles:/var/www/staticfiles + + # The API background scheduler based on Django and HUEY. Needs a Redis server + # This will process background tasks like creating new VPS machines + broker-scheduler: + container_name: broker_scheduler + env_file: + - docker/project.env + # This feels like bad practice.... + build: + context: ./ + dockerfile: ./docker/Dockerfile.scheduler + command: python manage.py run_huey + depends_on: + - broker-api + - db + - redis + +volumes: + postgres-data: + redis-data: + staticfiles: \ No newline at end of file diff --git a/docker/Dockerfile.api b/docker/Dockerfile.api new file mode 100644 index 0000000..ff23ba5 --- /dev/null +++ b/docker/Dockerfile.api @@ -0,0 +1,43 @@ +# Use the official Python image from the Docker Hub +FROM python:3.8 + +# These two environment variables prevent __pycache__/ files. +ENV PYTHONUNBUFFERED 1 +ENV PYTHONDONTWRITEBYTECODE 1 + +# Update packages and install nc for database up detection +RUN apt-get update && apt-get install -y netcat + +# Create an app user in the app group. +RUN useradd --user-group --create-home --no-log-init --shell /bin/bash app +ENV APP_HOME=/home/app/web + +# Create the staticfiles directory. This avoids permission errors. +RUN mkdir -p ${APP_HOME}/staticfiles + +# Change the workdir. +WORKDIR ${APP_HOME} + +# Copy the code. +COPY ./VRE/. 
${APP_HOME} + +# Upgrade pip +RUN pip install --upgrade pip wheel + +# Install the requirements. +RUN pip install -r requirements.txt + +# Startup script when the image is starting up with +COPY ./docker/entrypoint.api.sh ${APP_HOME}/entrypoint.api.sh +RUN chmod +x ${APP_HOME}/entrypoint.api.sh + +# fix file rights +RUN chown -R app:app $APP_HOME + +# Run as user +USER app:app + +# ENTRYPOINT does not work with variables..?? (${APP_HOME}) +ENTRYPOINT ["/home/app/web/entrypoint.api.sh"] +# Strange exit errors.... +#ENTRYPOINT ["sh","-c","${APP_HOME}/entrypoint.api.sh"] \ No newline at end of file diff --git a/docker/Dockerfile.nginx b/docker/Dockerfile.nginx new file mode 100644 index 0000000..c4494f3 --- /dev/null +++ b/docker/Dockerfile.nginx @@ -0,0 +1,8 @@ +FROM nginx:mainline + +RUN mkdir /code + +COPY ./nginx/api.vhost.conf /etc/nginx/conf.d/ + +RUN rm /etc/nginx/conf.d/default.conf +RUN sed -i 's@http://localhost:8000;@http://broker-api:8000;@g' /etc/nginx/conf.d/api.vhost.conf \ No newline at end of file diff --git a/docker/Dockerfile.scheduler b/docker/Dockerfile.scheduler new file mode 100644 index 0000000..7457915 --- /dev/null +++ b/docker/Dockerfile.scheduler @@ -0,0 +1,28 @@ +# Use the official Python image from the Docker Hub +FROM python:3.8 + +# These two environment variables prevent __pycache__/ files. +ENV PYTHONUNBUFFERED 1 +ENV PYTHONDONTWRITEBYTECODE 1 + +# Create an app user in the app group. +RUN useradd --user-group --create-home --no-log-init --shell /bin/bash app +ENV APP_HOME=/home/app/web + +# Change the workdir. +WORKDIR ${APP_HOME} + +# Copy the code. +COPY ./VRE/. ${APP_HOME} + +# Upgrade pip +RUN pip install --upgrade pip wheel + +# Install the requirements. 
+RUN pip install -r requirements.txt + +# fix file rights +RUN chown -R app:app $APP_HOME + +# Run as user +USER app:app \ No newline at end of file diff --git a/docker/entrypoint.api.sh b/docker/entrypoint.api.sh new file mode 100755 index 0000000..23fe832 --- /dev/null +++ b/docker/entrypoint.api.sh @@ -0,0 +1,31 @@ +#!/bin/sh + +if [ "$DATABASE" = "postgres" ]; then + echo "Waiting for postgres..." + + while ! nc -z $DATABASE_HOST $DATABASE_PORT; do + sleep 0.1 + done + + echo "PostgreSQL started" +fi + +# Make migrations and migrate the database. +echo "Making migrations and migrating the database. " +# Should migrations not already be made..... +python manage.py makemigrations --noinput + +python manage.py migrate --noinput +# This initial data should only run once..... not sure what happens if run multiple times +python manage.py loaddata virtual_machine_initial_data + +# This is not needed for the API?? Needs to figure this out.. +python manage.py collectstatic --noinput + +# Create a super user +python manage.py dockersetup ${DJANGO_ADMIN_NAME} ${DJANGO_ADMIN_PASSWORD} ${DJANGO_ADMIN_EMAIL} +# Create the HAWK REST user +python manage.py dockersetup --key ${DROPOFF_API_HAWK_KEY} --secret ${DROPOFF_API_HAWK_SECRET} ${DROPOFF_API_USER} ${DROPOFF_API_PASSWORD} ${DROPOFF_API_EMAIL} + +gunicorn VRE.wsgi:application --bind 0.0.0.0:8000 --workers=4 +#exec "$@" \ No newline at end of file diff --git a/docker/project.env b/docker/project.env new file mode 100644 index 0000000..42d048f --- /dev/null +++ b/docker/project.env @@ -0,0 +1,123 @@ +# The Postgress database container needs a database and user settings for use with Django +# The database and user will be created when the Postgress container is build +POSTGRES_USER=userone +POSTGRES_PASSWORD=secretpassword +POSTGRES_DB=project_db + +# The Django super user username. This user is created during build of this docker instance +DJANGO_ADMIN_NAME=admin +# The Django super user password. 
+DJANGO_ADMIN_PASSWORD=password +# The Django super user email for password retrieval. +DJANGO_ADMIN_EMAIL=admin+no-reply@rug.nl + +# The TUSD super user username. This user is created during build of this docker instance +# This used will also get a predifened token key and secret. Look for variable DROPOFF_API_HAWK_KEY and DROPOFF_API_HAWK_SECRET +DROPOFF_API_USER=tusdhook +# The TUSD super user password. +DROPOFF_API_PASSWORD=doemaarwat +# The TUSD super user email for password retrieval. +DROPOFF_API_EMAIL=tusd+no-reply@rug.nl + + +# A uniquely secret key +# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key +SECRET_KEY=@wb=#(f4uc0l%e!5*eo+aoflnxb(@!l9!=c5w=4b+x$=!8&vy%' + +# Disable debug in production +# https://docs.djangoproject.com/en/dev/ref/settings/#debug +DEBUG=True + +# Allowed hosts that Django does server. Use comma separated list Take care when NGINX is proxying in front of Django +# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts +ALLOWED_HOSTS=127.0.0.1,localhost,0.0.0.0,broker-api,broker-api-ngx + +# All internal IPS for Django. Use comma separated list +# https://docs.djangoproject.com/en/dev/ref/settings/#internal-ips +INTERNAL_IPS=127.0.0.1 + +# Enter the database url connection. Enter all parts even the port numbers: https://github.com/jacobian/dj-database-url +# By default a local sqlite3 database is used. +DATABASE_URL=postgres://userone:secretpassword@postgresdb:5432/project_db + +# The location on disk where the static files will be placed during deployment. Setting is required +# https://docs.djangoproject.com/en/dev/ref/settings/#static-root +STATIC_ROOT=staticfiles + +# Enter the default timezone for the visitors when it is not known. 
+# https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TIME_ZONE +TIME_ZONE=Europe/Amsterdam + +# Email settings +# https://docs.djangoproject.com/en/dev/ref/settings/#email-host +# EMAIL_HOST= + +# Email user name +# https://docs.djangoproject.com/en/dev/ref/settings/#email-host-user +# EMAIL_HOST_USER= + +# Email password +# https://docs.djangoproject.com/en/dev/ref/settings/#email-host-password +# EMAIL_HOST_PASSWORD= + +# Email server port number to use. Default is 25 +# https://docs.djangoproject.com/en/dev/ref/settings/#email-port +# EMAIL_PORT= + +# Does the email server support TLS? +# https://docs.djangoproject.com/en/dev/ref/settings/#email-use-tls +# EMAIL_USE_TLS= + +# https://docs.djangoproject.com/en/dev/ref/settings/#default-from-email +DEFAULT_FROM_EMAIL=Do not reply + +# The sender address. This needs to be one of the allowed domains due to SPF checks +# The code will use a reply-to header to make sure that replies go to the researcher and not this address +EMAIL_FROM_ADDRESS=Do not reply + +# The Redis server is used for background tasks. Enter the variables below. Leave password empty if authentication is not enabled. +# The hostname or IP where the Redis server is running. Default is localhost +REDIS_HOST=redisdb + +# The Redis port number on which the server is running. Default is 6379 +REDIS_PORT=6379 + +# The Redis password when authentication is enabled +REDIS_PASSWORD=redispassword + +# The number of connections to be made inside a connection pool. Default is 10 +REDIS_CONNECTIONS=10 + +# Enter the full path to the web-based file uploading without the Study ID part. The Study ID will be added to this url based on the visitor. +DROPOFF_BASE_URL=http://localhost:8080/dropoffs/ + +# Enter the full url to the NGINX service that is in front of the TUSD service. By default that is http://localhost:1090 +DROPOFF_UPLOAD_HOST=http://localhost:1090 + +# Which file extensions are **NOT** allowed to be uploaded. 
By default the extensions exe,com,bat,lnk,sh are not allowed +DROPOFF_NOT_ALLOWED_EXTENSIONS=exe,com,bat,lnk,sh + + +# TUS Daemon settings +# Change the required variable below to your needs. +# You can here also overrule the default variables in the startup.sh script + +# This is the full url to the REST API server to post updates during uploads. +WEBHOOK_URL=http://api-nginx/api/v1/dropoffs/webhook/ + +# The key for the token that is created on the REST API server for communication with the REST API server. +# This token will be created during building the Docker image +DROPOFF_API_HAWK_KEY=sDl6YmRv + +# The secret value that belongs to the token DROPOFF_API_HAWK_KEY. +# This token will be created during building the Docker image +DROPOFF_API_HAWK_SECRET=ExfcR524851PxVmbNzvR7qkoHwzSSJ1A + + +# Enter the super API user his key and secret. This is used on the portal side for getting study data on the upload page +# We abuse the TUSD user for this +DROPOFF_API_USER_KEY=sDl6YmRv +DROPOFF_API_USER_SECRET=ExfcR524851PxVmbNzvR7qkoHwzSSJ1A + +# What is the full VRE Portal domains. 
By default http://localhost:1337/api +VRE_BROKER_API=http://api-nginx/api diff --git a/docker/project.example.env b/docker/project.example.env new file mode 100644 index 0000000..90c2d6a --- /dev/null +++ b/docker/project.example.env @@ -0,0 +1,69 @@ +# The Postgress database settings used for Django +POSTGRES_USER=userone +POSTGRES_PASSWORD=secretpassword +POSTGRES_DB=project_db + +DATABASE=postgres +DATABASE_HOST=postgresdb +DATABASE_PORT=5432 + +# Enter the database url connection: https://github.com/jacobian/dj-database-url +DATABASE_URL=postgres://userone:secretpassword@postgresdb:5432/project_db + +# The Redis server settings used for Django background tasks +REDIS_HOST=redisdb +REDIS_PASSWORD=redispassword + +DJANGO_ADMIN_NAME=admin +DJANGO_ADMIN_PASSWORD=password +DJANGO_ADMIN_EMAIL=admin+no-reply@rug.nl + +# A uniquely secret key +SECRET_KEY=@wb=#(f4vc0l(e!5*eo+a@flnxb2@!l9!=c6w=4b+x$=!8&vy%' + +# Disable debug in production +DEBUG=True + +# Allowed hosts that Django does server. Take care when NGINX is proxying infront of Django +ALLOWED_HOSTS=127.0.0.1,localhost,0.0.0.0,api-nginx,portal-nginx + +# The location on disk where the static files will be placed during deployment. Setting is required +STATIC_ROOT=staticfiles + +# Enter the default timezone for the visitors when it is not known. +TIME_ZONE=Europe/Amsterdam + +# Email settings +# Mail host +#EMAIL_HOST= + +# Email user name +#EMAIL_HOST_USER= + +# Email password +#EMAIL_HOST_PASSWORD= + +# Email server port number to use +#EMAIL_PORT= + +# Does the email server supports TLS? +#EMAIL_USE_TLS=yes + +# What is the Dropoff hostname (webinterface) +DROPOFF_BASE_URL=http://localhost:8000/dropoffs/ + +# What is the Dropoff Upload host +DROPOFF_UPLOAD_HOST=http://localhost:1080 + +# What is the full VRE Portal domains +VRE_BROKER_API=http://api-nginx/api + +# TUS Daemon settings +# Change the variable below to your needs. 
You can also add more variables that are used in the startup.sh script +# TODO: How to generate this data when starting up? As this user does not exists yet in the database... Should be generated everytime we rebuild... +WEBHOOK_URL=http://api-nginx/api/v1/dropoffs/webhook/ +DROPOFF_API_USER=tusdhook +DROPOFF_API_PASSWORD=doemaarwat +DROPOFF_API_EMAIL=tusd+no-reply@rug.nl +DROPOFF_API_HAWK_KEY=sDl6YmRv +DROPOFF_API_HAWK_SECRET=ExfcR524851PxVmbNzvR7qkoHwzSSJ1A diff --git a/nginx/api.vhost.conf b/nginx/api.vhost.conf new file mode 100644 index 0000000..44038d7 --- /dev/null +++ b/nginx/api.vhost.conf @@ -0,0 +1,77 @@ +## +# You should look at the following URL's in order to grasp a solid understanding +# of Nginx configuration files in order to fully unleash the power of Nginx. +# https://www.nginx.com/resources/wiki/start/ +# https://www.nginx.com/resources/wiki/start/topics/tutorials/config_pitfalls/ +# https://wiki.debian.org/Nginx/DirectoryStructure +# +# In most cases, administrators will remove this file from sites-enabled/ and +# leave it as reference inside of sites-available where it will continue to be +# updated by the nginx packaging team. +# +# This file will automatically load configuration files provided by other +# applications, such as Drupal or Wordpress. These applications will be made +# available underneath a path with that package name, such as /drupal8. +# +# Please see /usr/share/doc/nginx-doc/examples/ for more detailed examples. +## + +# Default server configuration +# + +server { + listen 80; + listen [::]:80; + + # SSL configuration + # + # listen 443 ssl default_server; + # listen [::]:443 ssl default_server; + # + # Note: You should disable gzip for SSL traffic. + # See: https://bugs.debian.org/773332 + # + # Read up on ssl_ciphers to ensure a secure configuration. + # See: https://bugs.debian.org/765782 + # + # Self signed certs generated by the ssl-cert package + # Don't use them in a production server! 
+ # + # include snippets/snakeoil.conf; + + root /var/www/html; + + gzip off; + + # Add index.php to the list if you are using PHP + index index.html index.htm index.nginx-debian.html; + + server_name _; + + access_log /var/log/nginx/vre-api.access.log; + error_log /var/log/nginx/vre-api.error.log; + + location /static { + alias /var/www/staticfiles; + } + + # Only allow /api/ and /admin/ calls to this server + location ~* ^/(api|admin)/ { + # First attempt to serve request as file, then + # as directory, then fall back to displaying a 404. + # try_files $uri $uri/ =404; + + proxy_pass http://localhost:8000; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + + proxy_redirect off; + # Make sure you have hostname and portnumber if service is running on different port then 80 or 443 + proxy_set_header Host $host; # :$server_port; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Host $sent_http_host; + proxy_set_header X-Forwarded-Proto $scheme; + } +} diff --git a/run_scheduler.sh b/run_scheduler.sh new file mode 100755 index 0000000..0e922bd --- /dev/null +++ b/run_scheduler.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +# This will start the huey task scheduling: https://huey.readthedocs.io/en/latest/contrib.html#django +# Make sure this script is started in the same folder as where 'clouds.yaml' is. Else Cloud connections will fail + +source venv/bin/activate +./VRE/manage.py run_huey