Compare commits: 913180b8fd...6f9a06faa1 (12 commits)

Author | SHA1
---|---
 | 6f9a06faa1
 | 6e4f3d7be3
 | d9a0de1c68
cayop | 60890ae8f1
Cayo Puigdefabregas | 2c3225f08b
Cayo Puigdefabregas | 4c07920a6a
Cayo Puigdefabregas | 3a7eb0f97e
Cayo Puigdefabregas | baa4d87a11
Cayo Puigdefabregas | 9ebd3b18a4
Cayo Puigdefabregas | baf5c1e924
Cayo Puigdefabregas | 717b4ab6d5
Cayo Puigdefabregas | 8cfa83f921
34  .env.example  Normal file
@@ -0,0 +1,34 @@
# IDHUB
####

IDHUB_DOMAIN=localhost
IDHUB_ALLOWED_HOSTS=${IDHUB_DOMAIN},${IDHUB_DOMAIN}:9001,127.0.0.1,127.0.0.1:9001
IDHUB_TIME_ZONE='Europe/Madrid'
#IDHUB_SECRET_KEY='uncomment-it-and-fill-this'
# enable dev flags for a DEVELOPMENT deployment
# adapt to your domain in a production/reverse proxy env
IDHUB_CSRF_TRUSTED_ORIGINS='https://idhub.example.org'

# fill this section with your email credentials
IDHUB_DEFAULT_FROM_EMAIL="user@example.org"
IDHUB_EMAIL_HOST="smtp.example.org"
IDHUB_EMAIL_HOST_USER="smtp_user"
IDHUB_EMAIL_HOST_PASSWORD="smtp_passwd"
IDHUB_EMAIL_PORT=25
IDHUB_EMAIL_USE_TLS=True
IDHUB_EMAIL_BACKEND="django.core.mail.backends.smtp.EmailBackend"

# replace with production data
# this is used when IDHUB_DEPLOYMENT is not equal to DEVELOPMENT
IDHUB_ADMIN_USER='admin'
IDHUB_ADMIN_PASSWD='admin'
IDHUB_ADMIN_EMAIL='admin@example.org'

# this option needs to be set to 'n' for idhub to work in docker
# it defaults to 'y' to facilitate idhub development outside docker
IDHUB_SYNC_ORG_DEV='n'

# TODO this is only for testing
IDHUB_ENABLE_EMAIL=false
IDHUB_ENABLE_2FACTOR_AUTH=false
IDHUB_ENABLE_DOMAIN_CHECKER=false
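These variables are consumed by docker-compose.yml below through ${IDHUB_*} substitution; docker compose reads them from a .env file in the project root. A minimal bootstrap, mirroring what docker-reset.sh does:

    cp .env.example .env   # then edit at least IDHUB_DOMAIN and the email section
    docker compose up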
File diff suppressed because one or more lines are too long
33  docker-compose.yml  Normal file
@@ -0,0 +1,33 @@
services:

  idhub:
    init: true
    build:
      dockerfile: docker/idhub.Dockerfile
    environment:
      - DOMAIN=${IDHUB_DOMAIN:-localhost}
      - ALLOWED_HOSTS=${IDHUB_ALLOWED_HOSTS:-$IDHUB_DOMAIN}
      - DEBUG=true
      - INITIAL_ADMIN_EMAIL=${IDHUB_ADMIN_EMAIL}
      - INITIAL_ADMIN_PASSWORD=${IDHUB_ADMIN_PASSWD}
      - CREATE_TEST_USERS=true
      - ENABLE_EMAIL=${IDHUB_ENABLE_EMAIL:-true}
      - ENABLE_2FACTOR_AUTH=${IDHUB_ENABLE_2FACTOR_AUTH:-true}
      - ENABLE_DOMAIN_CHECKER=${IDHUB_ENABLE_DOMAIN_CHECKER:-true}
      - SECRET_KEY=${IDHUB_SECRET_KEY:-publicsecretisnotsecureVtmKBfxpVV47PpBCF2Nzz2H6qnbd}
      - STATIC_ROOT=${IDHUB_STATIC_ROOT:-/static/}
      - MEDIA_ROOT=${IDHUB_MEDIA_ROOT:-/media/}
      - PORT=${IDHUB_PORT:-9001}
      - DEFAULT_FROM_EMAIL=${IDHUB_DEFAULT_FROM_EMAIL}
      - EMAIL_HOST=${IDHUB_EMAIL_HOST}
      - EMAIL_HOST_USER=${IDHUB_EMAIL_HOST_USER}
      - EMAIL_HOST_PASSWORD=${IDHUB_EMAIL_HOST_PASSWORD}
      - EMAIL_PORT=${IDHUB_EMAIL_PORT}
      - EMAIL_USE_TLS=${IDHUB_EMAIL_USE_TLS}
      - EMAIL_BACKEND=${IDHUB_EMAIL_BACKEND}
      - SUPPORTED_CREDENTIALS=['CourseCredential', 'EOperatorClaim', 'FederationMembership', 'FinancialVulnerabilityCredential', 'MembershipCard']
      - SYNC_ORG_DEV=${IDHUB_SYNC_ORG_DEV}
    ports:
      - 9001:9001
    volumes:
      - .:/opt/idhub
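To verify that the .env values substitute as intended before starting anything, docker compose can render the resolved configuration:

    docker compose config   # prints the effective environment for the idhub service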
35  docker-reset.sh  Executable file
@@ -0,0 +1,35 @@
#!/bin/sh

# SPDX-License-Identifier: AGPL-3.0-or-later

set -e
set -u
# DEBUG
set -x

main() {
    cd "$(dirname "${0}")"

    rm -fv ./db.sqlite3
    if [ ! -f .env ]; then
        cp -v .env.example .env
        echo "WARNING: .env was not there, .env.example was copied, this only happens once"
    fi

    docker compose down -v
    docker compose build
    docker compose up ${detach_arg:-}

    # TODO docker registry
    #project=dkr-dsg.ac.upc.edu/trustchain-oc1-orchestral
    #idhub_image=${project}/idhub:${idhub_tag}
    #idhub_branch=$(git -C IdHub branch --show-current)
    # docker build -f docker/idhub.Dockerfile -t ${idhub_image} -t ${project}/idhub:${idhub_branch}__latest .
    #docker tag hello-world:latest farga.pangea.org/pedro/test/hello-world
    #docker push farga.pangea.org/pedro/test/hello-world:latest
}

main "${@}"

# written in emacs
# -*- mode: shell-script; -*-
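A usage sketch: the script wipes the database and rebuilds the containers; detach_arg is an optional environment variable it reads, so running in the background looks like this:

    ./docker-reset.sh                  # foreground, follows container logs
    detach_arg='-d' ./docker-reset.sh  # detached, via docker compose up -d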
34  docker/idhub.Dockerfile  Normal file
@@ -0,0 +1,34 @@
FROM python:3.11.7-slim-bookworm

# the last package line installs dependencies for weasyprint (for generating pdfs in the lafede pilot) https://doc.courtbouillon.org/weasyprint/stable/first_steps.html#debian-11
RUN apt update && \
    apt-get install -y \
        git \
        sqlite3 \
        jq \
        libpango-1.0-0 libpangoft2-1.0-0 \
    && pip install cffi brotli \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /opt/idhub

# reduce size (python specifics) -> src https://stackoverflow.com/questions/74616667/removing-pip-cache-after-installing-dependencies-in-docker-image
ENV PYTHONDONTWRITEBYTECODE=1
# here document in dockerfile, src https://stackoverflow.com/questions/40359282/launch-a-cat-command-unix-into-dockerfile
RUN cat > /etc/pip.conf <<END
[install]
compile = no

[global]
no-cache-dir = True
END

RUN pip install --upgrade pip

# not needed anymore?
#COPY ssikit_trustchain/didkit-0.3.2-cp311-cp311-manylinux_2_34_x86_64.whl /opt/idhub
COPY ./requirements.txt /opt/idhub
RUN pip install -r requirements.txt

COPY docker/idhub.entrypoint.sh /
ENTRYPOINT sh /idhub.entrypoint.sh
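The image can also be built outside compose; a sketch, assuming the repo root as build context (the tag idhub:local is arbitrary):

    docker build -f docker/idhub.Dockerfile -t idhub:local .

Note the image carries no application code: the entrypoint expects the repository to be mounted at /opt/idhub (see the volumes mapping above and the usage() check below).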
139  docker/idhub.entrypoint.sh  Executable file
@@ -0,0 +1,139 @@
#!/bin/sh

set -e
set -u
set -x


usage() {
    cat <<END
ERROR: you need to map your idhub git repo volume to docker, suggested volume mapping is:

    volumes:
      - ./IdHub:/opt/idhub
END
    exit 1
}

inject_env_vars() {
    # related https://www.kenmuse.com/blog/avoiding-dubious-ownership-in-dev-containers/
    git config --global --add safe.directory "${idhub_dir}"
    export COMMIT="commit: $(git log --pretty=format:'%h' -n 1)"

    cat > status_data <<END
DOMAIN=${DOMAIN}
END
}

deployment_strategy() {
    # detect if existing deployment (TODO only works with sqlite)
    if [ -f "${idhub_dir}/db.sqlite3" ]; then
        echo "INFO: detected EXISTING deployment"
        ./manage.py migrate

        # warn the admin that the password must be re-entered to keep the service working
        ./manage.py send_mail_admins
    else
        # this file helps all docker containers to guess the number of hosts involved
        # right now it is only needed by a new deployment for oidc
        if [ -d "/sharedsecret" ]; then
            touch /sharedsecret/${DOMAIN}
        fi

        # the migrate step is handled here in the docker entrypoint
        # inspired by https://medium.com/analytics-vidhya/django-with-docker-and-docker-compose-python-part-2-8415976470cc
        echo "INFO: detected NEW deployment"
        ./manage.py migrate

        printf "This is a DEVELOPMENT/PILOTS_EARLY DEPLOYMENT: including hardcoded demo data\n creating initial datas\n" >&2
        ./manage.py initial_datas

        if [ "${OIDC_ORGS:-}" ]; then
            config_oidc4vp
        else
            echo "Note: skipping oidc4vp config"
        fi
    fi
}

_set() {
    key="${1}"
    value="${2}"
    domain="${3}"
    sqlite3 db.sqlite3 "update oidc4vp_organization set ${key}='${value}' where domain='${domain}';"
}

_get() {
    sqlite3 -json db.sqlite3 "select * from oidc4vp_organization;"
}

_lines() {
    local myfile="${1}"
    cat "${myfile}" | wc -l
}

config_oidc4vp() {
    # populate your config
    data="$(_get)"
    echo "${data}" | jq --arg domain "${DOMAIN}" '{ ($domain): .}' > /sharedsecret/${DOMAIN}

    while true; do
        echo "waiting for the other idhubs to write; this is the only opportunity to sync with the other idhubs in the docker compose"
        ## break when no empty files are left
        if ! wc -l /sharedsecret/* | awk '{print $1;}' | grep -qE '^0$'; then
            break
        fi
        sleep 1
    done
    # get the other configs
    for host in /sharedsecret/*; do
        # we are flexible when querying for DOMAIN: take the first match based on regex
        target_domain="$(cat "${host}" | jq -r 'keys[0]')"
        if [ "${target_domain}" != "${DOMAIN}" ]; then
            filtered_data="$(cat "${host}" | jq --arg domain "${DOMAIN}" 'first(.[][] | select(.domain | test ($domain)))')"
            client_id="$(echo "${filtered_data}" | jq -r '.client_id')"
            client_secret="$(echo "${filtered_data}" | jq -r '.client_secret')"

            _set my_client_id ${client_id} ${target_domain}
            _set my_client_secret ${client_secret} ${target_domain}
        fi
    done
}

runserver() {
    PORT="${PORT:-8000}"
    if [ ! "${DEBUG:-}" = "true" ]; then
        ./manage.py collectstatic
        if [ "${EXPERIMENTAL:-}" = "true" ]; then
            # reloading on source code change is a debugging feature; maybe better to use DEBUG instead
            # src https://stackoverflow.com/questions/12773763/gunicorn-autoreload-on-source-change/24893069#24893069
            # gunicorn with 1 worker; with more than 1 worker this is not expected to work
            gunicorn --access-logfile - --error-logfile - -b :${PORT} trustchain_idhub.wsgi:application
        else
            ./manage.py runserver 0.0.0.0:${PORT}
        fi
    else
        ./manage.py runserver 0.0.0.0:${PORT}
    fi
}

check_app_is_there() {
    if [ ! -f "./manage.py" ]; then
        usage
    fi
}

main() {
    idhub_dir='/opt/idhub'
    cd "${idhub_dir}"

    check_app_is_there

    deployment_strategy

    inject_env_vars

    runserver
}

main "${@}"
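Once a container is up, the state that deployment_strategy and config_oidc4vp act on can be inspected from the host; a hedged sketch, assuming the service name idhub from docker-compose.yml:

    docker compose exec idhub sqlite3 -json db.sqlite3 \
        "select domain, client_id from oidc4vp_organization;"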
@@ -105,6 +105,7 @@ class Command(BaseCommand):
            assert dname
            assert title
        except Exception:
            ldata = {}
            title = ''
            _name = ''
@@ -33,10 +33,11 @@ class UserView(LoginRequiredMixin):
     ]

     def get(self, request, *args, **kwargs):
-        err_txt = "User domain is {} which does not match server domain {}".format(
-            request.get_host(), settings.DOMAIN
-        )
-        assert request.get_host() == settings.DOMAIN, err_txt
+        if settings.ENABLE_DOMAIN_CHECKER:
+            err_txt = "User domain is {} which does not match server domain {}".format(
+                request.get_host(), settings.DOMAIN
+            )
+            assert request.get_host() == settings.DOMAIN, err_txt
         self.admin_validated = cache.get("KEY_DIDS")
         response = super().get(request, *args, **kwargs)

@@ -55,10 +56,11 @@ class UserView(LoginRequiredMixin):
         return url or response

     def post(self, request, *args, **kwargs):
-        err_txt = "User domain is {} which does not match server domain {}".format(
-            request.get_host(), settings.DOMAIN
-        )
-        assert request.get_host() == settings.DOMAIN, err_txt
+        if settings.ENABLE_DOMAIN_CHECKER:
+            err_txt = "User domain is {} which does not match server domain {}".format(
+                request.get_host(), settings.DOMAIN
+            )
+            assert request.get_host() == settings.DOMAIN, err_txt
         self.admin_validated = cache.get("KEY_DIDS")
         response = super().post(request, *args, **kwargs)
         url = self.check_gdpr()
@@ -680,7 +680,7 @@ class VerificableCredential(models.Model):
         credential_subject = ujson.loads(data).get("credentialSubject", {})
         return credential_subject.items()

-    def issue(self, did, domain):
+    def issue(self, did, domain, save=True):
         if self.status == self.Status.ISSUED:
             return

@@ -700,6 +700,9 @@ class VerificableCredential(models.Model):
         if not valid:
             return

+        if not save:
+            return vc_str
+
         self.data = self.user.encrypt_data(vc_str)

         self.status = self.Status.ISSUED
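With the new save flag, issue() returns the signed credential string instead of encrypting and persisting it when save=False; the webhook sign endpoint added below passes this flag through, as shown in the curl sketch at the end of this diff.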
61  idhub/templates/credentials/device-snapshot-v1.json  Normal file
@@ -0,0 +1,61 @@
{
    "@context": [
        "https://www.w3.org/2018/credentials/v1"
    ],
    "type": ["VerifiableCredential", "DeviceSnapshot"],
    "issuer": "{{ issuer_did }}",
    "issuanceDate": "{{ issuance_date }}",
    "credentialSubject": {
        "operatorId": "{{ operator_id }}",
        "uuid": "{{ uuid }}",
        "type": "hardwareList",
        "software": "workbench-script",
        "deviceId": [
            {
                "name": "Manufacturer",
                "value": "{{ manufacturer }}"
            },
            {
                "name": "Model",
                "value": "{{ model }}"
            },
            {
                "name": "Serial",
                "value": "{{ serial_number }}"
            },
            {
                "name": "SKU",
                "value": "{{ sku }}"
            },
            {
                "name": "EthernetMacAddress",
                "value": "{{ mac }}"
            }
        ],
        "timestamp": "{{ issuance_date }}"
    },
    "evidence": [
        {
            "type": "HardwareList",
            "operation": "dmidecode",
            "output": "{{ dmidecode }}",
            "timestamp": "{{ issuance_date }}"
        },
        {
            "type": "HardwareList",
            "operation": "smartctl",
            "output": {{ smartctl|default:'""'|safe }},
            "timestamp": "{{ issuance_date }}"
        },
        {
            "type": "HardwareList",
            "operation": "inxi",
            "output": {{ inxi|default:'""'|safe }},
            "timestamp": "{{ issuance_date }}"
        }
    ],
    "credentialSchema": {
        "id": "https://idhub.pangea.org/vc_schemas/device-snapshot-v1.json",
        "type": "FullJsonSchemaValidator2021"
    }
}
@@ -6,6 +6,7 @@ black==23.9.1
 python-decouple==3.8
 jsonschema[format]==4.19.1
 pandas==2.1.1
+numpy>=1.21,<2.0
 xlrd==2.0.1
 odfpy==1.4.1
 requests==2.31.0
122  schemas/device-snapshot-v1.json  Normal file
@@ -0,0 +1,122 @@
{
    "$id": "https://idhub.pangea.org/vc_schemas/device-snapshot-v1.json",
    "$schema": "https://json-schema.org/draft/2020-12/schema",
    "title": "DeviceSnapshotV1",
    "description": "Snapshot created by workbench-script, software for discovering the hardware in a device.",
    "name": [
        {
            "value": "Snapshot",
            "lang": "en"
        }
    ],
    "type": "object",
    "allOf": [
        {
            "$ref": "https://www.w3.org/2018/credentials/v1"
        },
        {
            "properties": {
                "credentialSubject": {
                    "description": "Defines the properties of a digital device snapshot",
                    "type": "object",
                    "properties": {
                        "operatorId": {
                            "description": "Identifier related to the product operator, defined as a hash of an ID token (10 chars is enough)",
                            "type": "string",
                            "minLength": 10
                        },
                        "uuid": {
                            "description": "Unique identifier of the snapshot.",
                            "type": "string",
                            "minLength": 36
                        },
                        "type": {
                            "description": "Defines a snapshot type, e.g., hardwareList, dataDeletion (need to adjust the enum values).",
                            "type": "string",
                            "enum": [
                                "hardwareList", "dataDeletion"
                            ],
                            "minLength": 1
                        },
                        "software": {
                            "description": "Name of the snapshot software used.",
                            "type": "string",
                            "enum": [
                                "workbench-script"
                            ],
                            "minLength": 1
                        },
                        "deviceId": {
                            "description": "List of identification properties for the device, each with a name and value.",
                            "type": "array",
                            "items": {
                                "type": "object",
                                "properties": {
                                    "name": {
                                        "description": "The type of device identifier information, e.g., ManufacturerSerial, EthernetMacAddress.",
                                        "type": "string"
                                    },
                                    "value": {
                                        "description": "The value of the device identifier information.",
                                        "type": "string"
                                    }
                                },
                                "required": ["name", "value"]
                            }
                        },
                        "timestamp": {
                            "description": "Date and time of this snapshot.",
                            "type": "string",
                            "format": "date-time"
                        }
                    },
                    "required": [
                        "uuid",
                        "type",
                        "timestamp"
                    ]
                },
                "evidence": {
                    "description": "Contains supporting evidence about the process which resulted in the issuance of this credential as a result of system operations.",
                    "type": "array",
                    "items": {
                        "type": "object",
                        "properties": {
                            "type": {
                                "description": "Type of evidence, linked to credentialSubject.type.",
                                "type": "string",
                                "enum": [
                                    "HardwareList",
                                    "DataDeletion"
                                ]
                            },
                            "operation": {
                                "description": "Specifies the command executed for evidence generation.",
                                "type": "string",
                                "enum": [
                                    "inxi",
                                    "dmidecode",
                                    "smartctl"
                                ]
                            },
                            "output": {
                                "description": "Output from the executed command.",
                                "type": "string"
                            },
                            "timestamp": {
                                "description": "Timestamp of the evidence generation if needed.",
                                "type": "string",
                                "format": "date-time"
                            }
                        },
                        "required": [
                            "type",
                            "operation",
                            "output"
                        ]
                    }
                }
            }
        }
    ]
}
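A rendered snapshot can be checked against this schema locally; a hedged sketch using the separate check-jsonschema CLI (not part of requirements.txt; snapshot.json is a hypothetical rendered instance):

    pip install check-jsonschema
    check-jsonschema --schemafile schemas/device-snapshot-v1.json snapshot.json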
@@ -31,6 +31,7 @@ SECRET_KEY = config('SECRET_KEY')

 # SECURITY WARNING: don't run with debug turned on in production!
 DEBUG = config('DEBUG', default=False, cast=bool)
 DEVELOPMENT = config('DEVELOPMENT', default=False, cast=bool)

 DOMAIN = config("DOMAIN")
 assert DOMAIN not in [None, ''], "DOMAIN var is MANDATORY"

@@ -240,5 +241,6 @@ OIDC_ORGS = config('OIDC_ORGS', '')
 ENABLE_EMAIL = config('ENABLE_EMAIL', default=True, cast=bool)
 CREATE_TEST_USERS = config('CREATE_TEST_USERS', default=False, cast=bool)
 ENABLE_2FACTOR_AUTH = config('ENABLE_2FACTOR_AUTH', default=True, cast=bool)
+ENABLE_DOMAIN_CHECKER = config('ENABLE_DOMAIN_CHECKER', default=True, cast=bool)
 COMMIT = config('COMMIT', default='')
@@ -7,6 +7,7 @@ app_name = 'webhook'

 urlpatterns = [
     path('verify/', views.webhook_verify, name='verify'),
+    path('sign/', views.webhook_issue, name='sign'),
     path('tokens/', views.WebHookTokenView.as_view(), name='tokens'),
     path('tokens/new', views.TokenNewView.as_view(), name='new_token'),
     path('tokens/<int:pk>/del', views.TokenDeleteView.as_view(), name='delete_token'),
@@ -11,6 +11,8 @@ from pyvckit.verify import verify_vp, verify_vc
 from uuid import uuid4

 from idhub.mixins import AdminView
 from idhub_auth.models import User
 from idhub.models import DID, Schemas, VerificableCredential
 from webhook.models import Token
 from webhook.tables import TokensTable

@@ -22,7 +24,7 @@ def webhook_verify(request):
     if not auth_header or not auth_header.startswith('Bearer '):
         return JsonResponse({'error': 'Invalid or missing token'}, status=401)

-    token = auth_header.split(' ')[1]
+    token = auth_header.split(' ')[1].strip("'").strip('"')
     tk = Token.objects.filter(token=token).first()
     if not tk:
         return JsonResponse({'error': 'Invalid or missing token'}, status=401)

@@ -51,6 +53,60 @@ def webhook_verify(request):
     return JsonResponse({'error': 'Invalid request method'}, status=400)


+@csrf_exempt
+def webhook_issue(request):
+    if request.method == 'POST':
+        auth_header = request.headers.get('Authorization')
+        if not auth_header or not auth_header.startswith('Bearer '):
+            return JsonResponse({'error': 'Invalid or missing token'}, status=401)
+
+        token = auth_header.split(' ')[1].strip("'").strip('"')
+        tk = Token.objects.filter(token=token).first()
+        if not tk:
+            return JsonResponse({'error': 'Invalid or missing token'}, status=401)
+
+        try:
+            data = json.loads(request.body)
+        except json.JSONDecodeError:
+            return JsonResponse({'error': 'Invalid JSON'}, status=400)
+
+        typ = data.get("type")
+        vc = data.get("data")
+        save = data.get("save", True)
+        try:
+            vc = json.dumps(vc)
+        except Exception:
+            return JsonResponse({'error': 'Invalid JSON'}, status=400)
+
+        if not typ or not vc:
+            return JsonResponse({'error': 'Invalid JSON'}, status=400)
+
+        did = DID.objects.filter(user__isnull=True).first()
+        if not did:
+            return JsonResponse({'error': 'Invalid DID'}, status=400)
+
+        schema = Schemas.objects.filter(type=typ).first()
+        if not schema:
+            return JsonResponse({'error': 'Invalid credential'}, status=400)
+
+        user = User.objects.filter(is_admin=True).first()
+        cred = VerificableCredential(
+            csv_data=vc,
+            issuer_did=did,
+            schema=schema,
+            user=user
+        )
+
+        cred.set_type()
+        vc_signed = cred.issue(did, domain=request.get_host(), save=save)
+
+        return JsonResponse({'status': 'success', "data": vc_signed}, status=200)
+
+        return JsonResponse({'status': 'fail'}, status=200)
+
+    return JsonResponse({'error': 'Invalid request method'}, status=400)
+
+
 class WebHookTokenView(AdminView, SingleTableView):
     template_name = "token.html"
     title = _("Credential management")

@@ -93,4 +149,3 @@ class TokenNewView(AdminView, View):
         Token.objects.create(token=uuid4())

         return redirect('webhook:tokens')
-
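A hedged sketch of calling the new sign endpoint, assuming the webhook app is mounted under /webhook/ and a token was created via the tokens/new view; the type value must match an existing Schemas.type, and the bearer token and data payload here are hypothetical:

    curl -X POST "https://idhub.example.org/webhook/sign/" \
        -H "Authorization: Bearer 7a9f..." \
        -H "Content-Type: application/json" \
        -d '{"type": "DeviceSnapshot", "data": {"uuid": "...", "type": "hardwareList"}, "save": false}'

With "save": false the response carries the signed credential in its data field without persisting it, per the issue(save=...) change above.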