diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 34ec711df..9e9f3f1d1 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -12,16 +12,16 @@ jobs: steps: - uses: actions/checkout@v3 - - name: Build images - run: | - make docker-compose + # - name: Build images + # run: | + # make docker-compose - name: Start services run: | MODE=dev make start sleep 10 - - name: Run newman tests - run: | - make check-api + # - name: Run newman tests + # run: | + # make check-api - name: Stop services if: always() run: | diff --git a/.gitignore b/.gitignore index 1bbcff3c7..09cee3f23 100644 --- a/.gitignore +++ b/.gitignore @@ -137,7 +137,6 @@ fabric.properties bin/ tmp/ *.tmp -*.bak *.swp *~.nib local.properties @@ -219,7 +218,6 @@ yarn-error.log /coverage .idea package-lock.json -*bak .vscode # visual studio code @@ -244,9 +242,9 @@ src/dashboard/lambda/mock/index.js .config .config.old -# Cello local storage -/**/opt/* -/**/cello/* +# Fabric +src/api-engine/cello/builders +src/api-engine/cello/config # Local Netlify folder .netlify @@ -257,4 +255,6 @@ tests/postman/env.json tests/postman/junitResult.xml # Postgres -pgdata/ \ No newline at end of file +pgdata/ + +src/api-engine/cello diff --git a/Makefile b/Makefile index 015e59a1e..836c67989 100755 --- a/Makefile +++ b/Makefile @@ -208,7 +208,7 @@ check-api: ##@Test Run API tests with newman cd tests/postman && docker compose up --abort-on-container-exit || (echo "API tests failed $$?"; exit 1) start-docker-compose: - docker compose -f bootup/docker-compose-files/${COMPOSE_FILE} up -d --force-recreate --remove-orphans + docker compose -f docker-compose.dev.yaml up -d --build --force-recreate --remove-orphans stop-docker-compose: echo "Stop all services with bootup/docker-compose-files/${COMPOSE_FILE}..." 
@@ -224,7 +224,7 @@ docker-rest-agent: docker build -t hyperledger/cello-agent-docker:latest -f build_image/docker/agent/docker-rest-agent/Dockerfile.in ./ --build-arg pip=$(PIP) --platform linux/$(ARCH) fabric: - docker build -t hyperledger/fabric:2.5.13 -f build_image/docker/cello-hlf/Dockerfile build_image/docker/cello-hlf/ + docker build -t hyperledger/fabric:2.5.13 src/nodes/hyperledger-fabric dashboard: docker build -t hyperledger/cello-dashboard:latest -f build_image/docker/common/dashboard/Dockerfile.in ./ diff --git a/bootup/kubernetes/Makefile b/bootup/kubernetes/Makefile deleted file mode 100644 index b314c9673..000000000 --- a/bootup/kubernetes/Makefile +++ /dev/null @@ -1,104 +0,0 @@ -init-nginx-yaml: - mkdir -p ${ROOT_PATH}/bootup/kubernetes/nginx - @envsubst < templates/nginx/config.tmpl > nginx/config.yml - @envsubst < templates/nginx/deploy.tmpl > nginx/deploy.yml - @envsubst < templates/nginx/service.tmpl > nginx/service.yml - @envsubst < templates/nginx/ingress.tmpl > nginx/ingress.yml - -init-api-engine-yaml: - rm -rf ${ROOT_PATH}/bootup/kubernetes/api-engine - mkdir -p ${ROOT_PATH}/bootup/kubernetes/api-engine - @envsubst < templates/api-engine/config.tmpl > api-engine/config.yml - @envsubst < templates/api-engine/${DEPLOY_TEMPLATE_NAME} > api-engine/deploy.yml - @envsubst < templates/api-engine/service.tmpl > api-engine/service.yml - if [ "$(MODE)" = "dev" ]; then \ - envsubst < templates/api-engine/ingress.tmpl > api-engine/ingress.yml; \ - fi - -init-api-engine-tasks-yaml: - mkdir -p ${ROOT_PATH}/bootup/kubernetes/api-engine-tasks - @envsubst < templates/api-engine-tasks/deploy.tmpl > api-engine-tasks/deploy.yml - -init-rabbitmq-yaml: - mkdir -p ${ROOT_PATH}/bootup/kubernetes/rabbitmq - @envsubst < templates/rabbitmq/config.tmpl > rabbitmq/config.yml - @envsubst < templates/rabbitmq/deploy.tmpl > rabbitmq/deploy.yml - @envsubst < templates/rabbitmq/service.tmpl > rabbitmq/service.yml - -init-mongo-yaml: - mkdir -p 
${ROOT_PATH}/bootup/kubernetes/mongo - @envsubst < templates/mongo/deploy.tmpl > mongo/deploy.yml - @envsubst < templates/mongo/service.tmpl > mongo/service.yml - -init-redis-yaml: - mkdir -p ${ROOT_PATH}/bootup/kubernetes/redis - @envsubst < templates/redis/deploy.tmpl > redis/deploy.yml - @envsubst < templates/redis/service.tmpl > redis/service.yml - -init-operator-dashboard-yaml: - mkdir -p ${ROOT_PATH}/bootup/kubernetes/operator-dashboard - @envsubst < templates/operator-dashboard/config.tmpl > operator-dashboard/config.yml - @envsubst < templates/operator-dashboard/${DEPLOY_TEMPLATE_NAME} > operator-dashboard/deploy.yml - @envsubst < templates/operator-dashboard/service.tmpl > operator-dashboard/service.yml - -init-user-dashboard-yaml: - mkdir -p ${ROOT_PATH}/bootup/kubernetes/user-dashboard - @envsubst < templates/user-dashboard/config.tmpl > user-dashboard/config.yml - @envsubst < templates/user-dashboard/${DEPLOY_TEMPLATE_NAME} > user-dashboard/deploy.yml - @envsubst < templates/user-dashboard/service.tmpl > operator-dashboard/service.yml - @envsubst < templates/user-dashboard/ingress.tmpl > user-dashboard/ingress.yml - -init-parse-server-yaml: - mkdir -p ${ROOT_PATH}/bootup/kubernetes/parse-server - @envsubst < templates/parse-server/config.tmpl > parse-server/config.yml - @envsubst < templates/parse-server/${DEPLOY_TEMPLATE_NAME} > parse-server/deploy.yml - @envsubst < templates/parse-server/service.tmpl > parse-server/service.yml - -init-mysql-yaml: - mkdir -p ${ROOT_PATH}/bootup/kubernetes/mysql - @envsubst < templates/mysql/config.tmpl > mysql/config.yml - @envsubst < templates/mysql/deploy.tmpl > mysql/deploy.yml - @envsubst < templates/mysql/service.tmpl > mysql/service.yml - -init-postgres-yaml: - mkdir -p ${ROOT_PATH}/bootup/kubernetes/postgres - @envsubst < templates/postgres/config.tmpl > postgres/config.yml - @envsubst < templates/postgres/deploy.tmpl > postgres/deploy.yml - @envsubst < templates/postgres/service.tmpl > postgres/service.yml - 
-init-watchdog-yaml: - mkdir -p ${ROOT_PATH}/bootup/kubernetes/watchdog - @envsubst < templates/watchdog/config.tmpl > watchdog/config.yml - @envsubst < templates/watchdog/${DEPLOY_TEMPLATE_NAME} > watchdog/deploy.yml - -init-keycloak-yaml: - mkdir -p ${ROOT_PATH}/bootup/kubernetes/keycloak - @envsubst < templates/keycloak/config.tmpl > keycloak/config.yml - @envsubst < templates/keycloak/deploy.tmpl > keycloak/deploy.yml - @envsubst < templates/keycloak/service.tmpl > keycloak/service.yml - @envsubst < templates/keycloak/ingress.tmpl > keycloak/ingress.yml - -init-yaml: - @$(MAKE) init-nginx-yaml - @$(MAKE) init-api-engine-yaml - @$(MAKE) init-api-engine-tasks-yaml - @$(MAKE) init-redis-yaml - @$(MAKE) init-postgres-yaml - -start: - @kubectl apply --force -f postgres/ -n ${K8S_DEPLOY_NAMESPACE} - @kubectl apply --force -f redis/ -n ${K8S_DEPLOY_NAMESPACE} - @kubectl apply --force -f api-engine-tasks/ -n ${K8S_DEPLOY_NAMESPACE} - @kubectl apply --force -f api-engine/ -n ${K8S_DEPLOY_NAMESPACE} - if [ "$(MODE)" != "dev" ]; then \ - kubectl apply --force -f nginx/ -n ${K8S_DEPLOY_NAMESPACE}; \ - fi - -stop: - @kubectl delete -f postgres/ -n ${K8S_DEPLOY_NAMESPACE} - @kubectl delete -f redis/ -n ${K8S_DEPLOY_NAMESPACE} - @kubectl delete -f api-engine-tasks/ -n ${K8S_DEPLOY_NAMESPACE} - @kubectl delete -f api-engine/ -n ${K8S_DEPLOY_NAMESPACE} - if [ "$(MODE)" != "dev" ]; then \ - kubectl delete -f nginx/ -n ${K8S_DEPLOY_NAMESPACE}; \ - fi diff --git a/bootup/kubernetes/templates/api-engine-tasks/deploy.tmpl b/bootup/kubernetes/templates/api-engine-tasks/deploy.tmpl deleted file mode 100644 index 10775c9a7..000000000 --- a/bootup/kubernetes/templates/api-engine-tasks/deploy.tmpl +++ /dev/null @@ -1,40 +0,0 @@ -apiVersion: extensions/v1beta1 -kind: Deployment -metadata: - name: ${DEPLOY_API_ENGINE_TASKS_NAME} -spec: - replicas: 1 - template: - metadata: - labels: - app: ${DEPLOY_API_ENGINE_TASKS_NAME} - spec: - containers: - - name: api-engine-tasks - image: 
hyperledger/cello-api-engine - imagePullPolicy: IfNotPresent - envFrom: - - configMapRef: - name: ${CONFIG_API_ENGINE_NAME} - env: - - name: DEBUG - value: "True" - - name: RUN_MODE - value: "task" - volumeMounts: - - name: api-engine - mountPath: /var/www/server - - name: media - mountPath: /var/www/media - - name: docker-sock - mountPath: /var/run/docker.sock - volumes: - - name: api-engine - hostPath: - path: ${ROOT_PATH}/src/api-engine - - name: media - hostPath: - path: /opt/cello/api-engine/media - - name: docker-sock - hostPath: - path: /var/run/docker.sock diff --git a/bootup/kubernetes/templates/api-engine/config.tmpl b/bootup/kubernetes/templates/api-engine/config.tmpl deleted file mode 100644 index eae112f74..000000000 --- a/bootup/kubernetes/templates/api-engine/config.tmpl +++ /dev/null @@ -1,18 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: ${CONFIG_API_ENGINE_NAME} -data: - WEBROOT: ${API_ENGINE_WEBROOT} - DB_USER: ${POSTGRES_USER} - DB_PASSWORD: ${POSTGRES_PASSWORD} - DB_NAME: api-engine - DB_HOST: ${SERVICE_POSTGRES_NAME} - DB_PORT: "5432" - ADMIN_TOKEN: $API_ENGINE_ADMIN_TOKEN - CELERY_BROKER_URL: "redis://$SERVICE_REDIS_NAME" - ADMIN_USERNAME: $API_ENGINE_ADMIN_USERNAME - ADMIN_PASSWORD: $API_ENGINE_ADMIN_PASSWORD - ADMIN_EMAIL: $API_ENGINE_ADMIN_EMAIL - DOCKER_HOST: "$API_DOCKER_HOST" - API_VERSION: "$API_VERSION" diff --git a/bootup/kubernetes/templates/api-engine/deploy-dev.tmpl b/bootup/kubernetes/templates/api-engine/deploy-dev.tmpl deleted file mode 100644 index 0787b406b..000000000 --- a/bootup/kubernetes/templates/api-engine/deploy-dev.tmpl +++ /dev/null @@ -1,40 +0,0 @@ -apiVersion: extensions/v1beta1 -kind: Deployment -metadata: - name: ${DEPLOY_API_ENGINE_NAME} -spec: - replicas: 1 - template: - metadata: - labels: - app: ${DEPLOY_API_ENGINE_NAME} - spec: - containers: - - name: api-engine - image: hyperledger/cello-api-engine - imagePullPolicy: IfNotPresent - ports: - - containerPort: 8080 - envFrom: - - configMapRef: - 
name: ${CONFIG_API_ENGINE_NAME} - env: - - name: DEBUG - value: "True" - volumeMounts: - - name: api-engine - mountPath: /var/www/server - - name: media - mountPath: /var/www/media - - name: docker-sock - mountPath: /var/run/docker.sock - volumes: - - name: api-engine - hostPath: - path: ${ROOT_PATH}/src/api-engine - - name: media - hostPath: - path: /opt/cello/api-engine/media - - name: docker-sock - hostPath: - path: /var/run/docker.sock diff --git a/bootup/kubernetes/templates/api-engine/deploy.tmpl b/bootup/kubernetes/templates/api-engine/deploy.tmpl deleted file mode 100644 index 18bc7e1b6..000000000 --- a/bootup/kubernetes/templates/api-engine/deploy.tmpl +++ /dev/null @@ -1,27 +0,0 @@ -apiVersion: extensions/v1beta1 -kind: Deployment -metadata: - name: ${DEPLOY_API_ENGINE_NAME} -spec: - replicas: 1 - template: - metadata: - labels: - app: ${DEPLOY_API_ENGINE_NAME} - spec: - containers: - - name: api-engine - image: hyperledger/cello-api-engine - imagePullPolicy: IfNotPresent - ports: - - containerPort: 8080 - envFrom: - - configMapRef: - name: ${CONFIG_API_ENGINE_NAME} - volumeMounts: - - name: media - mountPath: /var/www/media - volumes: - - name: media - hostPath: - path: /opt/cello/api-engine/media diff --git a/bootup/kubernetes/templates/api-engine/ingress.tmpl b/bootup/kubernetes/templates/api-engine/ingress.tmpl deleted file mode 100644 index ab790f13f..000000000 --- a/bootup/kubernetes/templates/api-engine/ingress.tmpl +++ /dev/null @@ -1,15 +0,0 @@ -apiVersion: extensions/v1beta1 -kind: Ingress -metadata: - name: ${INGRESS_API_ENGINE_NAME} - annotations: - nginx.ingress.kubernetes.io/ssl-redirect: "false" - nginx.ingress.kubernetes.io/force-ssl-redirect: "false" -spec: - rules: - - http: - paths: - - path: ${API_ENGINE_WEBROOT} - backend: - serviceName: ${SERVICE_API_ENGINE_NAME} - servicePort: 8080 diff --git a/bootup/kubernetes/templates/api-engine/service.tmpl b/bootup/kubernetes/templates/api-engine/service.tmpl deleted file mode 100644 index 
d1c066c97..000000000 --- a/bootup/kubernetes/templates/api-engine/service.tmpl +++ /dev/null @@ -1,14 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: ${SERVICE_API_ENGINE_NAME} - labels: - app: ${SERVICE_API_ENGINE_NAME} -spec: - ports: - - port: 8080 - targetPort: 8080 - protocol: TCP - name: uwsgi - selector: - app: ${DEPLOY_API_ENGINE_NAME} diff --git a/bootup/kubernetes/templates/nginx/config.tmpl b/bootup/kubernetes/templates/nginx/config.tmpl deleted file mode 100644 index 97a78c224..000000000 --- a/bootup/kubernetes/templates/nginx/config.tmpl +++ /dev/null @@ -1,7 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: ${CONFIG_NGINX_NAME} -data: - URL_PREFIX: ${API_ENGINE_WEBROOT} - UWSGI_SERVER_HOST: ${SERVICE_API_ENGINE_NAME} diff --git a/bootup/kubernetes/templates/nginx/deploy.tmpl b/bootup/kubernetes/templates/nginx/deploy.tmpl deleted file mode 100644 index 969524770..000000000 --- a/bootup/kubernetes/templates/nginx/deploy.tmpl +++ /dev/null @@ -1,20 +0,0 @@ -apiVersion: extensions/v1beta1 -kind: Deployment -metadata: - name: ${DEPLOY_NGINX_NAME} -spec: - replicas: 1 - template: - metadata: - labels: - app: ${DEPLOY_NGINX_NAME} - spec: - containers: - - name: nginx - image: hyperledger/cello-nginx - imagePullPolicy: IfNotPresent - ports: - - containerPort: 80 - envFrom: - - configMapRef: - name: ${CONFIG_NGINX_NAME} diff --git a/bootup/kubernetes/templates/nginx/ingress.tmpl b/bootup/kubernetes/templates/nginx/ingress.tmpl deleted file mode 100644 index f575234a7..000000000 --- a/bootup/kubernetes/templates/nginx/ingress.tmpl +++ /dev/null @@ -1,15 +0,0 @@ -apiVersion: extensions/v1beta1 -kind: Ingress -metadata: - name: ${INGRESS_NGINX_NAME} - annotations: - nginx.ingress.kubernetes.io/ssl-redirect: "false" - nginx.ingress.kubernetes.io/force-ssl-redirect: "false" -spec: - rules: - - http: - paths: - - path: ${API_ENGINE_WEBROOT} - backend: - serviceName: ${SERVICE_NGINX_NAME} - servicePort: 80 diff --git 
a/bootup/kubernetes/templates/nginx/service.tmpl b/bootup/kubernetes/templates/nginx/service.tmpl deleted file mode 100644 index 75a132c40..000000000 --- a/bootup/kubernetes/templates/nginx/service.tmpl +++ /dev/null @@ -1,14 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: ${SERVICE_NGINX_NAME} - labels: - app: ${SERVICE_NGINX_NAME} -spec: - ports: - - port: 80 - targetPort: 80 - protocol: TCP - name: http - selector: - app: ${DEPLOY_NGINX_NAME} diff --git a/bootup/kubernetes/templates/postgres/config.tmpl b/bootup/kubernetes/templates/postgres/config.tmpl deleted file mode 100644 index 4befa0a53..000000000 --- a/bootup/kubernetes/templates/postgres/config.tmpl +++ /dev/null @@ -1,8 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: ${CONFIG_POSTGRES_NAME} -data: - POSTGRES_DB: api-engine - POSTGRES_USER: $POSTGRES_USER - POSTGRES_PASSWORD: $POSTGRES_PASSWORD diff --git a/bootup/kubernetes/templates/postgres/deploy.tmpl b/bootup/kubernetes/templates/postgres/deploy.tmpl deleted file mode 100644 index 2bb3bf9b4..000000000 --- a/bootup/kubernetes/templates/postgres/deploy.tmpl +++ /dev/null @@ -1,26 +0,0 @@ -apiVersion: extensions/v1beta1 -kind: Deployment -metadata: - name: ${DEPLOY_POSTGRES_NAME} -spec: - replicas: 1 - template: - metadata: - labels: - app: ${DEPLOY_POSTGRES_NAME} - spec: - containers: - - name: postgres - image: postgres:11.1 - ports: - - containerPort: 5432 - envFrom: - - configMapRef: - name: ${CONFIG_POSTGRES_NAME} - volumeMounts: - - name: db-path - mountPath: /var/lib/postgresql/data - volumes: - - name: db-path - hostPath: - path: /opt/cello/postgres diff --git a/bootup/kubernetes/templates/postgres/service.tmpl b/bootup/kubernetes/templates/postgres/service.tmpl deleted file mode 100644 index dbbeec8f4..000000000 --- a/bootup/kubernetes/templates/postgres/service.tmpl +++ /dev/null @@ -1,14 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: ${SERVICE_POSTGRES_NAME} - labels: - app: ${SERVICE_POSTGRES_NAME} -spec: - 
ports: - - port: 5432 - targetPort: 5432 - protocol: TCP - name: postgres - selector: - app: ${DEPLOY_POSTGRES_NAME} diff --git a/bootup/kubernetes/templates/redis/deploy.tmpl b/bootup/kubernetes/templates/redis/deploy.tmpl deleted file mode 100644 index 4b8f98549..000000000 --- a/bootup/kubernetes/templates/redis/deploy.tmpl +++ /dev/null @@ -1,24 +0,0 @@ -apiVersion: extensions/v1beta1 -kind: Deployment -metadata: - name: ${DEPLOY_REDIS_NAME} -spec: - replicas: 1 - template: - metadata: - labels: - app: ${DEPLOY_REDIS_NAME} - spec: - containers: - - name: redis - image: redis:4.0.13 - imagePullPolicy: IfNotPresent - ports: - - containerPort: 6379 - volumeMounts: - - name: redis-data - mountPath: /data - volumes: - - name: redis-data - hostPath: - path: /opt/cello/redis diff --git a/bootup/kubernetes/templates/redis/service.tmpl b/bootup/kubernetes/templates/redis/service.tmpl deleted file mode 100644 index ce9c08b60..000000000 --- a/bootup/kubernetes/templates/redis/service.tmpl +++ /dev/null @@ -1,14 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: ${SERVICE_REDIS_NAME} - labels: - app: ${SERVICE_REDIS_NAME} -spec: - ports: - - port: 6379 - targetPort: 6379 - protocol: TCP - name: redis - selector: - app: ${DEPLOY_REDIS_NAME} \ No newline at end of file diff --git a/build_image/docker/agent/docker-rest-agent/Dockerfile.in b/build_image/docker/agent/docker-rest-agent/Dockerfile.in deleted file mode 100644 index acc20d8b2..000000000 --- a/build_image/docker/agent/docker-rest-agent/Dockerfile.in +++ /dev/null @@ -1,14 +0,0 @@ -FROM python:3.8 -COPY src/agent/docker-rest-agent/requirements.txt / -ARG pip=pip.conf.bak -COPY src/agent/docker-rest-agent/pip.conf /root/.pip/$pip - -RUN pip install -r /requirements.txt -RUN mkdir -p /var/www/server - -COPY src/agent/docker-rest-agent/server.py /var/www/server -COPY src/agent/docker-rest-agent/gunicorn.conf.py /var/www/server - -WORKDIR /var/www/server - -CMD ["gunicorn", "server:app", "-c", "./gunicorn.conf.py"] 
diff --git a/build_image/docker/agent/k8s-rest-agent/Dockerfile.in b/build_image/docker/agent/k8s-rest-agent/Dockerfile.in deleted file mode 100644 index 236debf15..000000000 --- a/build_image/docker/agent/k8s-rest-agent/Dockerfile.in +++ /dev/null @@ -1,24 +0,0 @@ -FROM python:3.8 - -COPY src/agent/k8s-rest-agent/requirements.txt / -COPY src/agent/k8s-rest-agent/pip /root/.pip - -RUN pip install -r /requirements.txt - -COPY src/agent/k8s-rest-agent/src /var/www/server -COPY src/agent/k8s-rest-agent/entrypoint.sh / -COPY src/agent/k8s-rest-agent/uwsgi/server.ini /etc/uwsgi/apps-enabled/ -RUN mkdir /var/log/supervisor - -ENV WEBROOT / -ENV WEB_CONCURRENCY 10 -ENV DEBUG False -ENV UWSGI_WORKERS 1 -ENV UWSGI_PROCESSES 1 -ENV UWSGI_OFFLOAD_THREADS 10 -ENV UWSGI_MODULE server.wsgi:application - -WORKDIR /var/www/server -RUN python manage.py collectstatic --noinput - -CMD bash /entrypoint.sh diff --git a/build_image/docker/agent/kubernetes/Dockerfile.in b/build_image/docker/agent/kubernetes/Dockerfile.in deleted file mode 100644 index 75ef243c1..000000000 --- a/build_image/docker/agent/kubernetes/Dockerfile.in +++ /dev/null @@ -1,15 +0,0 @@ -FROM _DOCKER_BASE_ - -COPY src/agent/kubernetes-agent/requirements.txt / -RUN pip install -r /requirements.txt -RUN curl -LO https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl && chmod +x kubectl && \ - mv kubectl /usr/local/bin/kubectl - -COPY src/agent/kubernetes-agent/src /app - -WORKDIR /app - -ENV KUBECONFIG /app/.kube/config -ENV PYTHONPATH /app:$PATHONPATH - -CMD python main.py diff --git a/build_image/docker/common/api-engine/Dockerfile.in b/build_image/docker/common/api-engine/Dockerfile.in deleted file mode 100644 index 8cfba44b9..000000000 --- a/build_image/docker/common/api-engine/Dockerfile.in +++ /dev/null @@ -1,29 +0,0 @@ -FROM python:3.8 - -# Install software -RUN apt-get update \ - && apt-get install -y 
gettext-base graphviz libgraphviz-dev vim \ - && apt-get autoclean \ - && apt-get clean \ - && apt-get autoremove && rm -rf /var/cache/apt/ - -# Set the working dir -WORKDIR /var/www/server - -# Install compiled code tools from Artifactory and copy it to opt folder. -RUN curl -L --retry 5 --retry-delay 3 "https://github.com/hyperledger/fabric/releases/download/v2.5.13/hyperledger-fabric-linux-amd64-2.5.13.tar.gz" | tar xz -C /opt/ - -# Copy source code to the working dir -COPY src/api-engine ./ -COPY template/node /opt/node - -# Install python dependencies -RUN pip3 install -r requirements.txt - -# Add uwsgi configuration file -COPY build_image/docker/common/api-engine/server.ini /etc/uwsgi/apps-enabled/ - -ENV RUN_MODE=server - -COPY build_image/docker/common/api-engine/entrypoint.sh / -CMD ["bash", "/entrypoint.sh"] \ No newline at end of file diff --git a/build_image/docker/common/api-engine/entrypoint.sh b/build_image/docker/common/api-engine/entrypoint.sh deleted file mode 100755 index 76cad69cd..000000000 --- a/build_image/docker/common/api-engine/entrypoint.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env bash - -#bash /scripts/initial.sh; - -echo "Generating the settings.py for api_engine" -LOCAL_SETTINGS="/var/www/server/api_engine/settings.py" -RAW_LOCAL_SETTINGS="/var/www/server/api_engine/settings.py.example" - -envsubst < ${RAW_LOCAL_SETTINGS} > ${LOCAL_SETTINGS} - -holdup -t 120 tcp://${DB_HOST}:${DB_PORT}; -if [[ "$RUN_MODE" == "server" ]]; then - python manage.py migrate; - python manage.py create_user \ - --username ${API_ENGINE_ADMIN_USERNAME:-admin} \ - --password ${API_ENGINE_ADMIN_PASSWORD:-pass} \ - --email ${API_ENGINE_ADMIN_EMAIL:-admin@cello.com} \ - --is_superuser \ - --role admin - if [[ "$DEBUG" == "True" ]]; then # For dev, use pure Django directly - python manage.py runserver 0.0.0.0:8080; - else # For production, use uwsgi in front - uwsgi --ini /etc/uwsgi/apps-enabled/server.ini; - fi -else - celery -A api_engine worker -l info -fi 
diff --git a/build_image/docker/common/dashboard/config-nginx.sh b/build_image/docker/common/dashboard/config-nginx.sh deleted file mode 100755 index 72bb109a7..000000000 --- a/build_image/docker/common/dashboard/config-nginx.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -NGINX_RAW_CONFIG=/etc/nginx/conf.d/default.conf.tmpl -NGINX_CONFIG=/etc/nginx/conf.d/default.conf -envsubst '$$API_PROXY,$$SERVICE_PORT' < ${NGINX_RAW_CONFIG} > ${NGINX_CONFIG} diff --git a/build_image/docker/common/dashboard/default.conf.tmpl b/build_image/docker/common/dashboard/default.conf.tmpl deleted file mode 100644 index 99f975e74..000000000 --- a/build_image/docker/common/dashboard/default.conf.tmpl +++ /dev/null @@ -1,27 +0,0 @@ -server { - listen 80; - server_name localhost; - - #charset koi8-r; - #access_log /var/log/nginx/host.access.log main; - gzip on; - gzip_min_length 1k; - gzip_comp_level 9; - gzip_types text/plain application/javascript application/x-javascript text/css application/xml text/javascript application/x-httpd-php image/jpeg image/gif image/png; - gzip_vary on; - gzip_disable "MSIE [1-6]\."; - root /usr/share/nginx/html; - - location / { - index index.html index.htm; - try_files $uri $uri/ /index.html; - } - - location /api { - proxy_pass $API_PROXY; - proxy_set_header X-Forwarded-Proto $scheme; - proxy_set_header Host $host:$SERVICE_PORT; - proxy_set_header X-Real-IP $remote_addr; - } -} - diff --git a/build_image/docker/common/dashboard/nginx.conf b/build_image/docker/common/dashboard/nginx.conf deleted file mode 100644 index a8733c69a..000000000 --- a/build_image/docker/common/dashboard/nginx.conf +++ /dev/null @@ -1,57 +0,0 @@ - -user nginx; -worker_processes 1; - -error_log /var/log/nginx/error.log warn; -pid /var/run/nginx.pid; - - -events { - worker_connections 1024; -} - - -http { - include /etc/nginx/mime.types; - default_type application/octet-stream; - - log_format main '$remote_addr - $remote_user [$time_local] "$request" ' - '$status $body_bytes_sent 
"$http_referer" ' - '"$http_user_agent" "$http_x_forwarded_for"'; - - access_log /var/log/nginx/access.log main; - - sendfile on; - #tcp_nopush on; - - keepalive_timeout 65; - - #gzip on; - - client_max_body_size 100M; - - server { - listen 8081; - server_name localhost; - - #charset koi8-r; - #access_log /var/log/nginx/host.access.log main; - gzip on; - gzip_min_length 1k; - gzip_comp_level 9; - gzip_types text/plain application/javascript application/x-javascript text/css application/xml text/javascript application/x-httpd-php image/jpeg image/gif image/png; - gzip_vary on; - gzip_disable "MSIE [1-6]\."; - root /usr/share/nginx/html; - - location / { - index index.html index.htm; - } - location /api { - proxy_pass http://cello-api-engine:8080; - } - } - - - include /etc/nginx/conf.d/*.conf; -} diff --git a/build_image/docker/common/nginx/Dockerfile.in b/build_image/docker/common/nginx/Dockerfile.in deleted file mode 100644 index 71410af5a..000000000 --- a/build_image/docker/common/nginx/Dockerfile.in +++ /dev/null @@ -1,21 +0,0 @@ -FROM nginx:1.24.0 - -RUN apt-get update && apt-get install -y wget && \ - apt-get autoclean && apt-get clean && apt-get autoremove && rm -rf /var/cache/apt/ -COPY build_image/docker/common/nginx/config-nginx.sh /config-nginx.sh -COPY build_image/docker/common/nginx/nginx.conf.default /etc/nginx/nginx.conf.default -RUN mkdir -p /var/www/static -RUN cd /tmp && wget https://github.com/encode/django-rest-framework/archive/master.tar.gz && \ - tar -zxvf master.tar.gz && mv django-rest-framework-master/rest_framework/static/rest_framework /var/www/static/ && rm -rf django-rest-framework-master master.tar.gz && \ - wget https://github.com/axnsan12/drf-yasg/archive/master.tar.gz && tar -zxvf master.tar.gz && mv drf-yasg-master/src/drf_yasg/static/drf-yasg /var/www/static/ && rm -rf drf-yasg-master master.tar.gz -RUN chmod +x /config-nginx.sh - -RUN mkdir /var/lib/nginx - -# forward request and error logs to docker log collector -RUN ln -sf 
/dev/stdout /var/log/nginx/access.log \ - && ln -sf /dev/stderr /var/log/nginx/error.log - -EXPOSE 80 443 - -CMD ["bash", "-c", "/config-nginx.sh && nginx -g 'daemon off;'"] diff --git a/build_image/docker/common/nginx/config-nginx.sh b/build_image/docker/common/nginx/config-nginx.sh deleted file mode 100755 index 0313e72b8..000000000 --- a/build_image/docker/common/nginx/config-nginx.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env bash - -NGINX_RAW_CONFIG=/etc/nginx/nginx.conf.default -NGINX_CONFIG=/etc/nginx/nginx.conf -envsubst '$$URL_PREFIX,$$UWSGI_SERVER_HOST' < ${NGINX_RAW_CONFIG} > ${NGINX_CONFIG} \ No newline at end of file diff --git a/build_image/docker/common/nginx/nginx.conf.default b/build_image/docker/common/nginx/nginx.conf.default deleted file mode 100644 index eda491fdc..000000000 --- a/build_image/docker/common/nginx/nginx.conf.default +++ /dev/null @@ -1,62 +0,0 @@ - -#user nobody; -worker_processes 1; - -#error_log logs/error.log; -#error_log logs/error.log notice; -#error_log logs/error.log info; - -#pid logs/nginx.pid; - - -events { - worker_connections 1024; -} - - -http { - include mime.types; - default_type application/octet-stream; - - #log_format main '$remote_addr - $remote_user [$time_local] "$request" ' - # '$status $body_bytes_sent "$http_referer" ' - # '"$http_user_agent" "$http_x_forwarded_for"'; - - #access_log logs/access.log main; - - sendfile on; - #tcp_nopush on; - - #keepalive_timeout 0; - keepalive_timeout 65; - - #gzip on; - upstream api_server { - least_conn; - server $UWSGI_SERVER_HOST:8080; - } - - server { - listen 80; - server_name _; - - #charset koi8-r; - - #access_log logs/host.access.log main; - - location $URL_PREFIX/static { - alias /var/www/static; - } - - location $URL_PREFIX/media { - alias /var/www/media; - } - - location $URL_PREFIX { - include uwsgi_params; - uwsgi_pass api_server; - uwsgi_param SCRIPT_NAME $URL_PREFIX; - uwsgi_read_timeout 500; - } - } -} \ No newline at end of file diff --git 
a/build_image/dockerhub/latest/agent/fabric-operator/Dockerfile b/build_image/dockerhub/latest/agent/fabric-operator/Dockerfile deleted file mode 100644 index 97f2c7a89..000000000 --- a/build_image/dockerhub/latest/agent/fabric-operator/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# Dockerfile for hyperledger cello fabirc operator agent -# -# @see https://github.com/hyperledger/cello/tree/master/docs/agents/fabric-operator.md -# -FROM alpine/git AS BUILD - -RUN cd /tmp && git init cello && cd cello && \ - git remote add origin https://github.com/hyperledger/cello.git && \ - git config core.sparsecheckout true && \ - echo "src/agent/fabric-operator/*" >> .git/info/sparse-checkout && \ - git pull --depth=1 origin master -RUN release=$(wget -O - https://storage.googleapis.com/kubernetes-release/release/stable.txt) && \ - wget https://storage.googleapis.com/kubernetes-release/release/${release}/bin/linux/amd64/kubectl -O /kubectl - -FROM alpine -RUN apk update && apk add jq gettext curl bash && mkdir /home/app -COPY --from=build /tmp/cello/src/agent/fabric-operator/deploy /home/app -COPY --from=build /tmp/cello/src/agent/fabric-operator/agent /home/app -COPY --from=build /kubectl /usr/local/bin/kubectl -RUN chmod +x /usr/local/bin/kubectl - - -ENV HOME /home -WORKDIR /home/app -ENV KUBECONFIG /home/.kube/config - -CMD bash /home/app/deploy_cr.sh diff --git a/build_image/dockerhub/latest/agent/kubernetes/Dockerfile b/build_image/dockerhub/latest/agent/kubernetes/Dockerfile deleted file mode 100644 index 9ff318052..000000000 --- a/build_image/dockerhub/latest/agent/kubernetes/Dockerfile +++ /dev/null @@ -1,22 +0,0 @@ -FROM busybox as downloader - -RUN cd /tmp && wget -c https://github.com/hyperledger/cello/archive/master.zip && \ - unzip master.zip - -FROM python:3.6 - -LABEL maintainer="github.com/hyperledger/cello" - -COPY --from=downloader /tmp/cello-master/src/agent/kubernetes-agent/requirements.txt / -RUN pip install -r /requirements.txt -RUN curl -LO 
https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl && chmod +x kubectl && \ - mv kubectl /usr/local/bin/kubectl - -COPY --from=downloader /tmp/cello-master/src/agent/kubernetes-agent/src /app - -WORKDIR /app - -ENV KUBECONFIG /app/.kube/config -ENV PYTHONPATH /app:$PATHONPATH - -CMD python main.py diff --git a/build_image/dockerhub/latest/common/api-engine/Dockerfile b/build_image/dockerhub/latest/common/api-engine/Dockerfile deleted file mode 100644 index 7f4fff1bb..000000000 --- a/build_image/dockerhub/latest/common/api-engine/Dockerfile +++ /dev/null @@ -1,25 +0,0 @@ -FROM python:3.6 - -# Install software -RUN apt-get update \ - && apt-get install -y gettext-base graphviz libgraphviz-dev \ - && apt-get autoclean \ - && apt-get clean \ - && apt-get autoremove && rm -rf /var/cache/apt/ - -# Set the working dir -WORKDIR /var/www/server - -# Copy source code to the working dir -COPY src/api-engine ./ - -# Install python dependencies -RUN pip install -r requirements.txt - -# Add uwsgi configuration file -COPY build_image/docker/common/api-engine/server.ini /etc/uwsgi/apps-enabled/ - -ENV RUN_MODE server - -COPY build_image/docker/common/api-engine/entrypoint.sh / -CMD bash /entrypoint.sh diff --git a/build_image/dockerhub/latest/common/api-engine/entrypoint.sh b/build_image/dockerhub/latest/common/api-engine/entrypoint.sh deleted file mode 100755 index 76cad69cd..000000000 --- a/build_image/dockerhub/latest/common/api-engine/entrypoint.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env bash - -#bash /scripts/initial.sh; - -echo "Generating the settings.py for api_engine" -LOCAL_SETTINGS="/var/www/server/api_engine/settings.py" -RAW_LOCAL_SETTINGS="/var/www/server/api_engine/settings.py.example" - -envsubst < ${RAW_LOCAL_SETTINGS} > ${LOCAL_SETTINGS} - -holdup -t 120 tcp://${DB_HOST}:${DB_PORT}; -if [[ "$RUN_MODE" == "server" ]]; then - python manage.py 
migrate; - python manage.py create_user \ - --username ${API_ENGINE_ADMIN_USERNAME:-admin} \ - --password ${API_ENGINE_ADMIN_PASSWORD:-pass} \ - --email ${API_ENGINE_ADMIN_EMAIL:-admin@cello.com} \ - --is_superuser \ - --role admin - if [[ "$DEBUG" == "True" ]]; then # For dev, use pure Django directly - python manage.py runserver 0.0.0.0:8080; - else # For production, use uwsgi in front - uwsgi --ini /etc/uwsgi/apps-enabled/server.ini; - fi -else - celery -A api_engine worker -l info -fi diff --git a/build_image/dockerhub/latest/common/api-engine/server.ini b/build_image/dockerhub/latest/common/api-engine/server.ini deleted file mode 100644 index ec73f2ad6..000000000 --- a/build_image/dockerhub/latest/common/api-engine/server.ini +++ /dev/null @@ -1,34 +0,0 @@ -[uwsgi] - -# Django-related settings -# the base directory (full path) -chdir = /var/www/server -# Django's wsgi file -module = api_engine.wsgi:application -route-run = fixpathinfo: -# process-related settings -# master -# maximum number of worker processes -workers = 10 -processes = 6 -# the socket (use the full path to be safe -# clear environment on exit -vacuum = true -env = DJANGO_SETTINGS_MODULE=api_engine.settings -#module = django.core.handlers.wsgi:WSGIHandler() -socket = 0.0.0.0:8080 -#chmod-socket = 777 -plugins = python -#pythonpath = /usr/share/openstack-dashboard/ -buffer-size = 32768 -enable-threads = true - -vhost-host = true - -uid = root -gid = root - -touch-reload = /tmp/server.txt -auto-procname = true - -logger = file:/tmp/server.log \ No newline at end of file diff --git a/build_image/dockerhub/latest/common/dashboard/Dockerfile b/build_image/dockerhub/latest/common/dashboard/Dockerfile deleted file mode 100644 index 02f813cc6..000000000 --- a/build_image/dockerhub/latest/common/dashboard/Dockerfile +++ /dev/null @@ -1,25 +0,0 @@ -FROM busybox as downloader - -RUN cd /tmp && wget -c https://github.com/hyperledger/cello/archive/master.zip && \ - unzip master.zip - -FROM 
circleci/node:latest-browsers as builder - -LABEL maintainer="github.com/hyperledger/cello" - -WORKDIR /usr/src/app/ -USER root -RUN mkdir -p /usr/src/app && cd /usr/src/app -COPY --from=downloader /tmp/cello-master/src/dashboard /usr/src/app -RUN yarn --network-timeout 600000 && yarn run build - -FROM nginx:1.15.12 - -COPY --from=builder /usr/src/app/dist /usr/share/nginx/html -COPY --from=downloader /tmp/cello-master/build_image/docker/common/dashboard/config-nginx.sh / -RUN chmod +x /config-nginx.sh -COPY --from=downloader /tmp/cello-master/build_image/docker/common/dashboard/default.conf.tmpl /etc/nginx/conf.d/default.conf.tmpl - -EXPOSE 80 - -CMD ["bash", "-c", "/config-nginx.sh && nginx -g 'daemon off;'"] diff --git a/build_image/dockerhub/latest/common/nginx/Dockerfile b/build_image/dockerhub/latest/common/nginx/Dockerfile deleted file mode 100644 index a9ccb1202..000000000 --- a/build_image/dockerhub/latest/common/nginx/Dockerfile +++ /dev/null @@ -1,26 +0,0 @@ -FROM busybox as downloader - -RUN cd /tmp && wget -c https://github.com/hyperledger/cello/archive/master.zip && \ - unzip master.zip - -FROM nginx:1.15.7 - -RUN apt-get update && apt-get install -y wget && \ - apt-get autoclean && apt-get clean && apt-get autoremove && rm -rf /var/cache/apt/ -COPY --from=downloader /tmp/cello-master/build_image/docker/common/nginx/config-nginx.sh /config-nginx.sh -COPY --from=downloader /tmp/cello-master/build_image/docker/common/nginx/nginx.conf.default /etc/nginx/nginx.conf.default -RUN mkdir -p /var/www/static -RUN cd /tmp && wget https://github.com/encode/django-rest-framework/archive/master.tar.gz && \ - tar -zxvf master.tar.gz && mv django-rest-framework-master/rest_framework/static/rest_framework /var/www/static/ && rm -rf django-rest-framework-master master.tar.gz && \ - wget https://github.com/axnsan12/drf-yasg/archive/master.tar.gz && tar -zxvf master.tar.gz && mv drf-yasg-master/src/drf_yasg/static/drf-yasg /var/www/static/ && rm -rf drf-yasg-master 
master.tar.gz -RUN chmod +x /config-nginx.sh - -RUN mkdir /var/lib/nginx - -# forward request and error logs to docker log collector -RUN ln -sf /dev/stdout /var/log/nginx/access.log \ - && ln -sf /dev/stderr /var/log/nginx/error.log - -EXPOSE 80 443 - -CMD ["bash", "-c", "/config-nginx.sh && nginx -g 'daemon off;'"] diff --git a/docker-compose.dev.yaml b/docker-compose.dev.yaml new file mode 100644 index 000000000..5ff505588 --- /dev/null +++ b/docker-compose.dev.yaml @@ -0,0 +1,66 @@ +services: + cello-dashboard: + build: + context: ./src/dashboard + image: cello/dashboard:latest + container_name: cello-dashboard + restart: unless-stopped + ports: + - "${DASHBOARD_SERVICE_PORT:-8081}:8081" + networks: + - cello-net + depends_on: + - cello-api-engine + + cello-postgres: + image: postgres:12.0 + container_name: cello-postgres + restart: unless-stopped + environment: + - POSTGRES_DB=api_engine + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=123456 + ports: + - "5432:5432" + volumes: + - cello-postgres:/var/lib/postgresql/data + networks: + - cello-net + + cello-api-engine: + build: + context: ./src/api-engine + image: cello/api-engine:latest + container_name: cello-api-engine + restart: unless-stopped + stdin_open: true + dns_search: . 
+ environment: + - GODEBUG=netdns=go + - DB_NAME=api_engine + - DB_USER=postgres + - DB_PASSWORD=123456 + - DB_HOST=cello-postgres + - DB_PORT=5432 + - DEBUG=True + - CORE_VM_ENDPOINT=unix:///host/var/run/docker.sock + - FABRIC_LOGGING_SPEC=INFO + ports: + - "8080:8080" + volumes: + - /var/run/docker.sock:/var/run/docker.sock + - cello-api-engine:/cello + - cello-api-engine-chaincode:/media + networks: + - cello-net + depends_on: + - cello-postgres + +networks: + cello-net: + name: cello-net + +volumes: + cello-postgres: + cello-api-engine: + cello-api-engine-chaincode: diff --git a/src/agent/docker-rest-agent/README.md b/src/agent/docker-rest-agent/README.md deleted file mode 100644 index 61e5854c7..000000000 --- a/src/agent/docker-rest-agent/README.md +++ /dev/null @@ -1,7 +0,0 @@ -## Pre-requisite - -If you want to use agent, first use the make docker-rest-agent command to generate the image; then run it; - - “docker run -p 5001:5001 -e DOCKER_URL="http://x.x.x.x:2375" -d hyperldger/cello-agent-docker:latest” - -When you run it, you must fill in the IP address of your docker server; diff --git a/src/agent/docker-rest-agent/gunicorn.conf.py b/src/agent/docker-rest-agent/gunicorn.conf.py deleted file mode 100644 index 466f9e7a6..000000000 --- a/src/agent/docker-rest-agent/gunicorn.conf.py +++ /dev/null @@ -1,3 +0,0 @@ -workers = 1 -worker_class = "gevent" -bind = "0.0.0.0:5001" diff --git a/src/agent/docker-rest-agent/intergration-test/block.zip b/src/agent/docker-rest-agent/intergration-test/block.zip deleted file mode 100644 index 461b2e8f1..000000000 Binary files a/src/agent/docker-rest-agent/intergration-test/block.zip and /dev/null differ diff --git a/src/agent/docker-rest-agent/intergration-test/msp.zip b/src/agent/docker-rest-agent/intergration-test/msp.zip deleted file mode 100644 index 5e001613b..000000000 Binary files a/src/agent/docker-rest-agent/intergration-test/msp.zip and /dev/null differ diff --git 
a/src/agent/docker-rest-agent/intergration-test/orderer_config.zip b/src/agent/docker-rest-agent/intergration-test/orderer_config.zip deleted file mode 100644 index 5f2c42b7b..000000000 Binary files a/src/agent/docker-rest-agent/intergration-test/orderer_config.zip and /dev/null differ diff --git a/src/agent/docker-rest-agent/intergration-test/peer_config.zip b/src/agent/docker-rest-agent/intergration-test/peer_config.zip deleted file mode 100644 index 5fea13e82..000000000 Binary files a/src/agent/docker-rest-agent/intergration-test/peer_config.zip and /dev/null differ diff --git a/src/agent/docker-rest-agent/intergration-test/test.py b/src/agent/docker-rest-agent/intergration-test/test.py deleted file mode 100644 index 7a314a2ab..000000000 --- a/src/agent/docker-rest-agent/intergration-test/test.py +++ /dev/null @@ -1,86 +0,0 @@ -from requests import put, get, post -import base64 -import docker -import json - -client = docker.from_env() - -with open("msp.zip", "rb") as node_msp, open("tls.zip", "rb") as tls, open( - "block.zip", "rb" -) as block, open("peer_config.zip", "rb") as peer_config, open( - "orderer_config.zip", "rb" -) as orderer_config: - data = { - "msp": base64.b64encode(node_msp.read()), - "tls": base64.b64encode(tls.read()), - "bootstrap_block": base64.b64encode(block.read()), - "peer_config_file": base64.b64encode(peer_config.read()), - "orderer_config_file": base64.b64encode(orderer_config.read()), - "img": "hyperledger/cello-hlf:2.2.0", - "cmd": 'bash /tmp/init.sh "peer node start"', - "name": "cello-hlf-peer", - } -print("-" * 20) -print("Test creating a node") -print() -n = post("http://localhost:5001/api/v1/nodes", data=data) -print(n.text) -txt = json.loads(n.text) -nid = txt["data"]["id"] -print("-" * 20) - -print("Test starting a node") -print() -data = {"action": "start"} -response = post("http://localhost:5001/api/v1/nodes/" + nid, data=data) -print(response.text) -print("-" * 20) - -print("Test restarting a node") -print() -data = 
{"action": "restart"} -response = post("http://localhost:5001/api/v1/nodes/" + nid, data=data) -print(response.text) -print("-" * 20) - - -print("Test updating a node") -print() -# TODO(dixing): use different commands & configuration files -with open("msp.zip", "rb") as node_msp, open("tls.zip", "rb") as tls, open( - "block.zip", "rb" -) as block, open("peer_config.zip", "rb") as peer_config, open( - "orderer_config.zip", "rb" -) as orderer_config: - data = { - "action": "update", - "msp": base64.b64encode(node_msp.read()), - "tls": base64.b64encode(tls.read()), - "bootstrap_block": base64.b64encode(block.read()), - "peer_config_file": base64.b64encode(peer_config.read()), - "orderer_config_file": base64.b64encode(orderer_config.read()), - "cmd": 'bash /tmp/update.sh "peer node start"', - } -response = post("http://localhost:5001/api/v1/nodes/" + nid, data=data) -print(response.text) -print("-" * 20) - -print("Test stopping a node") -print() -data = {"action": "stop"} -response = post("http://localhost:5001/api/v1/nodes/" + nid, data=data) -print(response.text) -print("-" * 20) - - -print("Get status of a node") -print() -response = get("http://localhost:5001/api/v1/nodes/" + nid) -print(response.text) -print("-" * 20) - -print("Test deleting a node") -print() -data = {"action": "delete"} -response = post("http://localhost:5001/api/v1/nodes/" + nid, data=data) -print(response.text) diff --git a/src/agent/docker-rest-agent/intergration-test/tls.zip b/src/agent/docker-rest-agent/intergration-test/tls.zip deleted file mode 100644 index b7f4a2c9f..000000000 Binary files a/src/agent/docker-rest-agent/intergration-test/tls.zip and /dev/null differ diff --git a/src/agent/docker-rest-agent/pip.conf b/src/agent/docker-rest-agent/pip.conf deleted file mode 100644 index 89c3b57e6..000000000 --- a/src/agent/docker-rest-agent/pip.conf +++ /dev/null @@ -1,5 +0,0 @@ -[global] -index-url=http://mirrors.aliyun.com/pypi/simple/ - -[install] -trusted-host=mirrors.aliyun.com diff 
--git a/src/agent/docker-rest-agent/requirements.txt b/src/agent/docker-rest-agent/requirements.txt deleted file mode 100644 index 380071820..000000000 --- a/src/agent/docker-rest-agent/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -docker>=6.0.1 -Flask>=2.2.2 -gunicorn>=20.1.0 -gevent>=22.10.2 diff --git a/src/agent/docker-rest-agent/server.py b/src/agent/docker-rest-agent/server.py deleted file mode 100644 index 2ccd6fd3c..000000000 --- a/src/agent/docker-rest-agent/server.py +++ /dev/null @@ -1,211 +0,0 @@ -from flask import Flask, jsonify, request -import docker -import sys -import logging -import os -import ast - -logging.basicConfig(level=logging.INFO) - -app = Flask(__name__) -PASS_CODE = "OK" -FAIL_CODE = "Fail" - -docker_url = os.getenv("DOCKER_URL") -storage_path = os.getenv("STORAGE_PATH") - -client = docker.DockerClient(docker_url) -res = {"code": "", "data": {}, "msg": ""} - - -@app.route("/api/v1/networks", methods=["GET"]) -def get_network(): - container_list = client.containers.list() - containers = {} - for container in container_list: - containers[container.id] = { - "id": container.id, - "short_id": container.short_id, - "name": container.name, - "status": container.status, - "image": str(container.image), - "attrs": container.attrs, - } - res = {"code": PASS_CODE, "data": containers, "msg": ""} - return jsonify({"res": res}) - - -@app.route("/api/v1/nodes", methods=["POST"]) -def create_node(): - node_name = request.form.get("name") - env = { - "HLF_NODE_MSP": request.form.get("msp"), - "HLF_NODE_TLS": request.form.get("tls"), - "HLF_NODE_PEER_CONFIG": request.form.get("peer_config_file"), - "HLF_NODE_ORDERER_CONFIG": request.form.get("orderer_config_file"), - "platform": "linux/amd64", - } - port_map = ast.literal_eval(request.form.get("port_map")) - volumes = [ - "{}/fabric/{}:/etc/hyperledger/fabric".format(storage_path, node_name), - "{}/production/{}:/var/hyperledger/production".format( - storage_path, node_name - ), - 
"/var/run/docker.sock:/host/var/run/docker.sock", - ] - if request.form.get("type") == "peer": - peer_envs = { - "CORE_VM_ENDPOINT": "unix:///host/var/run/docker.sock", - "CORE_VM_DOCKER_HOSTCONFIG_NETWORKMODE": "cello-net", - "FABRIC_LOGGING_SPEC": "INFO", - "CORE_PEER_TLS_ENABLED": "true", - "CORE_PEER_PROFILE_ENABLED": "false", - "CORE_PEER_TLS_CERT_FILE": "/etc/hyperledger/fabric/tls/server.crt", - "CORE_PEER_TLS_KEY_FILE": "/etc/hyperledger/fabric/tls/server.key", - "CORE_PEER_TLS_ROOTCERT_FILE": "/etc/hyperledger/fabric/tls/ca.crt", - "CORE_PEER_ID": node_name, - "CORE_PEER_ADDRESS": node_name + ":7051", - "CORE_PEER_LISTENADDRESS": "0.0.0.0:7051", - "CORE_PEER_CHAINCODEADDRESS": node_name + ":7052", - "CORE_PEER_CHAINCODELISTENADDRESS": "0.0.0.0:7052", - "CORE_PEER_GOSSIP_BOOTSTRAP": node_name + ":7051", - "CORE_PEER_GOSSIP_EXTERNALENDPOINT": node_name + ":7051", - "CORE_PEER_LOCALMSPID": node_name.split(".")[1].capitalize() - + "MSP", - "CORE_PEER_MSPCONFIGPATH": "/etc/hyperledger/fabric/msp", - "CORE_OPERATIONS_LISTENADDRESS": node_name + ":9444", - "CORE_METRICS_PROVIDER": "prometheus", - } - env.update(peer_envs) - else: - order_envs = { - "FABRIC_LOGGING_SPEC": "INFO", - "ORDERER_GENERAL_LISTENADDRESS": "0.0.0.0", - "ORDERER_GENERAL_LISTENPORT": "7050", - "ORDERER_GENERAL_LOCALMSPID": "OrdererMSP", - "ORDERER_GENERAL_LOCALMSPDIR": "/etc/hyperledger/fabric/msp", - "ORDERER_GENERAL_TLS_ENABLED": "true", - "ORDERER_GENERAL_TLS_PRIVATEKEY": "/etc/hyperledger/fabric/tls/server.key", - "ORDERER_GENERAL_TLS_CERTIFICATE": "/etc/hyperledger/fabric/tls/server.crt", - "ORDERER_GENERAL_TLS_ROOTCAS": "[/etc/hyperledger/fabric/tls/ca.crt]", - "ORDERER_GENERAL_CLUSTER_CLIENTCERTIFICATE": "/etc/hyperledger/fabric/tls/server.crt", - "ORDERER_GENERAL_CLUSTER_CLIENTPRIVATEKEY": "/etc/hyperledger/fabric/tls/server.key", - "ORDERER_GENERAL_CLUSTER_ROOTCAS": "[/etc/hyperledger/fabric/tls/ca.crt]", - "ORDERER_GENERAL_BOOTSTRAPMETHOD": "none", - 
"ORDERER_CHANNELPARTICIPATION_ENABLED": "true", - "ORDERER_ADMIN_TLS_ENABLED": "true", - "ORDERER_ADMIN_TLS_CERTIFICATE": "/etc/hyperledger/fabric/tls/server.crt", - "ORDERER_ADMIN_TLS_PRIVATEKEY": "/etc/hyperledger/fabric/tls/server.key", - "ORDERER_ADMIN_TLS_ROOTCAS": "[/etc/hyperledger/fabric/tls/ca.crt]", - "ORDERER_ADMIN_TLS_CLIENTROOTCAS": "[/etc/hyperledger/fabric/tls/ca.crt]", - "ORDERER_ADMIN_LISTENADDRESS": "0.0.0.0:7053", - "ORDERER_OPERATIONS_LISTENADDRESS": node_name + ":9443", - "ORDERER_METRICS_PROVIDER": "prometheus", - } - env.update(order_envs) - try: - # same as `docker run -dit yeasy/hyperledge-fabric:2.2.0 -e VARIABLES`` - container = client.containers.run( - request.form.get("img"), - request.form.get("cmd"), - detach=True, - tty=True, - stdin_open=True, - network="cello-net", - name=request.form.get("name"), - dns_search=["."], - volumes=volumes, - environment=env, - ports=port_map, - ) - except: - res["code"] = FAIL_CODE - res["data"] = sys.exc_info()[0] - res["msg"] = "creation failed" - logging.error(res) - raise - - res["code"] = PASS_CODE - res["data"]["status"] = "created" - res["data"]["id"] = container.id - res["data"][ - "public-grpc" - ] = "127.0.0.1:7050" # TODO: read the info from config file - res["data"]["public-raft"] = "127.0.0.1:7052" - res["msg"] = "node created" - - return jsonify(res) - - -@app.route("/api/v1/nodes/", methods=["GET", "POST"]) -def operate_node(id): - container = client.containers.get(id) - if request.method == "POST": - act = request.form.get("action") # only with POST - - try: - if act == "start": - container.start() - res["msg"] = "node started" - elif act == "restart": - container.restart() - res["msg"] = "node restarted" - elif act == "stop": - container.stop() - res["msg"] = "node stopped" - elif act == "delete": - container.remove() - res["msg"] = "node deleted" - elif act == "update": - - env = {} - - if "msp" in request.form: - env["HLF_NODE_MSP"] = request.form.get("msp") - - if "tls" in 
request.form: - env["HLF_NODE_TLS"] = request.form.get("tls") - - if "bootstrap_block" in request.form: - env["HLF_NODE_BOOTSTRAP_BLOCK"] = request.form.get( - "bootstrap_block" - ) - - if "peer_config_file" in request.form: - env["HLF_NODE_PEER_CONFIG"] = request.form.get( - "peer_config_file" - ) - - if "orderer_config_file" in request.form: - env["HLF_NODE_ORDERER_CONFIG"] = request.form.get( - "orderer_config_file" - ) - - container.exec_run( - request.form.get("cmd"), - detach=True, - tty=True, - stdin=True, - environment=env, - ) - container.restart() - res["msg"] = "node updated" - - else: - res["msg"] = "undefined action" - except: - res["code"] = FAIL_CODE - res["data"] = sys.exc_info()[0] - res["msg"] = act + "failed" - logging.error(res) - raise - else: - # GET - res["data"]["status"] = container.status - - res["code"] = PASS_CODE - return jsonify(res) - - -if __name__ == "__main__": - app.run(host="0.0.0.0", port=5001) diff --git a/src/agent/k8s-rest-agent/Dockerfile b/src/agent/k8s-rest-agent/Dockerfile deleted file mode 100644 index 88e3750e0..000000000 --- a/src/agent/k8s-rest-agent/Dockerfile +++ /dev/null @@ -1,24 +0,0 @@ -FROM python:3.8 - -COPY requirements.txt / -COPY pip /root/.pip - -RUN pip install -r /requirements.txt - -COPY src /var/www/server -COPY entrypoint.sh / -COPY uwsgi/server.ini /etc/uwsgi/apps-enabled/ -RUN mkdir /var/log/supervisor - -ENV WEBROOT / -ENV WEB_CONCURRENCY 10 -ENV DEBUG False -ENV UWSGI_WORKERS 1 -ENV UWSGI_PROCESSES 1 -ENV UWSGI_OFFLOAD_THREADS 10 -ENV UWSGI_MODULE server.wsgi:application - -WORKDIR /var/www/server -RUN python manage.py collectstatic --noinput - -CMD bash /entrypoint.sh diff --git a/src/agent/k8s-rest-agent/entrypoint.sh b/src/agent/k8s-rest-agent/entrypoint.sh deleted file mode 100644 index 9325749ff..000000000 --- a/src/agent/k8s-rest-agent/entrypoint.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env bash - -if [[ "$RUN_TYPE" == "SERVER" ]]; then - uwsgi --ini /etc/uwsgi/apps-enabled/server.ini; 
-else - if [[ "$RUN_TYPE" == "TASK" ]]; then - celery -A server worker --autoscale=20,6 -l info - elif [[ "$RUN_TYPE" == "BEAT_TASK" ]]; then - celery -A server beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler --pidfile=/opt/celeryd.pid - fi -fi diff --git a/src/agent/k8s-rest-agent/pip/pip.conf b/src/agent/k8s-rest-agent/pip/pip.conf deleted file mode 100644 index 1c12d133f..000000000 --- a/src/agent/k8s-rest-agent/pip/pip.conf +++ /dev/null @@ -1,5 +0,0 @@ -[global] -index-url=http://mirrors.cloud.aliyuncs.com/pypi/simple/ - -[install] -trusted-host=mirrors.cloud.aliyuncs.com diff --git a/src/agent/k8s-rest-agent/requirements.txt b/src/agent/k8s-rest-agent/requirements.txt deleted file mode 100644 index a53c40ac7..000000000 --- a/src/agent/k8s-rest-agent/requirements.txt +++ /dev/null @@ -1,18 +0,0 @@ -Django>=3.0 -uwsgi -enum34 -djangorestframework -holdup>1.5.0,<=1.6.0 -drf-yasg<=1.17.0 -swagger_spec_validator<=2.4.1 -psycopg2-binary -celery<5.0,>=4.4 -redis -requests -supervisor -django-celery-beat -django-celery-results -django-3-jet -djangorestframework-jwt<=1.11.0 -python-jwt # 需要安装,否则会出现token解码失败错误 -shortuuid diff --git a/src/agent/k8s-rest-agent/src/api/apps.py b/src/agent/k8s-rest-agent/src/api/apps.py deleted file mode 100644 index 14b89a829..000000000 --- a/src/agent/k8s-rest-agent/src/api/apps.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.apps import AppConfig - - -class ApiConfig(AppConfig): - name = "api" diff --git a/src/agent/k8s-rest-agent/src/api/auth.py b/src/agent/k8s-rest-agent/src/api/auth.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/agent/k8s-rest-agent/src/api/management/commands/test_task.py b/src/agent/k8s-rest-agent/src/api/management/commands/test_task.py deleted file mode 100644 index 9d8b32f6d..000000000 --- a/src/agent/k8s-rest-agent/src/api/management/commands/test_task.py +++ /dev/null @@ -1,14 +0,0 @@ -from django.core.management import BaseCommand -from api.tasks import 
example_task -from django_celery_beat.models import IntervalSchedule, PeriodicTask - - -class Command(BaseCommand): - help = "Test Task" - - def handle(self, *args, **options): - interval = IntervalSchedule.objects.first() - PeriodicTask.objects.create( - interval=interval, name="example", task="server.tasks.example_task" - ) - # example_task.delay() diff --git a/src/agent/k8s-rest-agent/src/api/models/__init__.py b/src/agent/k8s-rest-agent/src/api/models/__init__.py deleted file mode 100644 index ef4b6058c..000000000 --- a/src/agent/k8s-rest-agent/src/api/models/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .user import User, Profile diff --git a/src/agent/k8s-rest-agent/src/api/models/user.py b/src/agent/k8s-rest-agent/src/api/models/user.py deleted file mode 100644 index 5e9419c0e..000000000 --- a/src/agent/k8s-rest-agent/src/api/models/user.py +++ /dev/null @@ -1,42 +0,0 @@ -from django.contrib.auth.models import AbstractUser -from django.db import models -from django.db.models.signals import post_save -from api.utils.db_functions import make_uuid - - -class User(AbstractUser): - roles = [] - - id = models.UUIDField( - primary_key=True, - help_text="ID of user", - default=make_uuid, - editable=True, - ) - username = models.CharField(default="", max_length=128, unique=True) - - def __str__(self): - return self.username - - -class Profile(models.Model): - user = models.OneToOneField( - User, related_name="profile", on_delete=models.CASCADE - ) - created_at = models.DateTimeField(auto_now_add=True) - - def __str__(self): - return "%s's profile" % self.user - - class Meta: - ordering = ("-created_at",) - - -def create_user_profile(sender, instance, created, **kwargs): - if created: - Profile.objects.create(user=instance) - - -post_save.connect(create_user_profile, sender=User) - -# Create your models here. 
diff --git a/src/agent/k8s-rest-agent/src/api/routes/hello/views.py b/src/agent/k8s-rest-agent/src/api/routes/hello/views.py deleted file mode 100644 index c8fdde30a..000000000 --- a/src/agent/k8s-rest-agent/src/api/routes/hello/views.py +++ /dev/null @@ -1,37 +0,0 @@ -import logging -import os - -from rest_framework import viewsets, status -from drf_yasg.utils import swagger_auto_schema -from rest_framework.decorators import action -from rest_framework.response import Response - -from api.utils.mixins import PermissionsPerMethodMixin - -LOG = logging.getLogger(__name__) -APP_VERSION = os.getenv("APP_VERSION", "v1") - - -class HelloViewSet(PermissionsPerMethodMixin, viewsets.ViewSet): - - @swagger_auto_schema( - operation_summary="Hello world", operation_description="Hello world" - ) - def list(self, request): - return Response( - {"hello": "world %s" % APP_VERSION}, status=status.HTTP_200_OK - ) - - @swagger_auto_schema(operation_summary="hello world need auth") - @action( - methods=["get"], - url_path="need-auth", - url_name="need-auth", - detail=False, - ) - # @permission_classes((IsAuthenticated,)) - def need_auth(self, request): - LOG.info("request user %s", request.user) - return Response( - {"hello": "auth world %s" % APP_VERSION}, status=status.HTTP_200_OK - ) diff --git a/src/agent/k8s-rest-agent/src/api/tasks/__init__.py b/src/agent/k8s-rest-agent/src/api/tasks/__init__.py deleted file mode 100644 index 1468bf421..000000000 --- a/src/agent/k8s-rest-agent/src/api/tasks/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from api.tasks.task.example import example_task diff --git a/src/agent/k8s-rest-agent/src/api/tasks/task/example.py b/src/agent/k8s-rest-agent/src/api/tasks/task/example.py deleted file mode 100644 index f29ee66ac..000000000 --- a/src/agent/k8s-rest-agent/src/api/tasks/task/example.py +++ /dev/null @@ -1,12 +0,0 @@ -import logging - -from server.celery import app - - -LOG = logging.getLogger(__name__) - - -@app.task(name="example_task") -def 
example_task(): - LOG.info("example task") - return True diff --git a/src/agent/k8s-rest-agent/src/api/utils/common/__init__.py b/src/agent/k8s-rest-agent/src/api/utils/common/__init__.py deleted file mode 100644 index 2da9e9042..000000000 --- a/src/agent/k8s-rest-agent/src/api/utils/common/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .swagger import with_common_response -from .db import paginate_list diff --git a/src/agent/k8s-rest-agent/src/api/utils/common/db.py b/src/agent/k8s-rest-agent/src/api/utils/common/db.py deleted file mode 100644 index d0ac7ca0e..000000000 --- a/src/agent/k8s-rest-agent/src/api/utils/common/db.py +++ /dev/null @@ -1,26 +0,0 @@ -from django.core.paginator import Paginator -from django.db.models import Func - - -class Round(Func): - function = "ROUND" - arity = 2 - - -def paginate_list(data=None, page=1, per_page=10, limit=None): - if not data: - data = [] - - total = len(data) - - if per_page != -1: - p = Paginator(data, per_page) - last_page = p.page_range[-1] - page = page if page <= last_page else last_page - data = p.page(page) - total = p.count - else: - if limit: - data = data[:limit] - - return data, total diff --git a/src/agent/k8s-rest-agent/src/api/utils/common/swagger.py b/src/agent/k8s-rest-agent/src/api/utils/common/swagger.py deleted file mode 100644 index 7c06a7478..000000000 --- a/src/agent/k8s-rest-agent/src/api/utils/common/swagger.py +++ /dev/null @@ -1,62 +0,0 @@ -from drf_yasg import openapi -from rest_framework import serializers -from rest_framework import status - -from api.utils.serializers import BadResponseSerializer - -basic_type_info = [ - (serializers.CharField, openapi.TYPE_STRING), - (serializers.BooleanField, openapi.TYPE_BOOLEAN), - (serializers.IntegerField, openapi.TYPE_INTEGER), - (serializers.FloatField, openapi.TYPE_NUMBER), - (serializers.FileField, openapi.TYPE_FILE), - (serializers.ImageField, openapi.TYPE_FILE), -] - - -def to_form_paras(self): - custom_paras = [] - for field_name, field in 
self.fields.items(): - type_str = openapi.TYPE_STRING - for field_class, type_format in basic_type_info: - if isinstance(field, field_class): - type_str = type_format - help_text = getattr(field, "help_text") - default = getattr(field, "default", None) - required = getattr(field, "required") - if callable(default): - custom_paras.append( - openapi.Parameter( - field_name, - openapi.IN_FORM, - help_text, - type=type_str, - required=required, - ) - ) - else: - custom_paras.append( - openapi.Parameter( - field_name, - openapi.IN_FORM, - help_text, - type=type_str, - required=required, - default=default, - ) - ) - return custom_paras - - -def with_common_response(responses=None): - if responses is None: - responses = {} - - responses.update( - { - status.HTTP_400_BAD_REQUEST: BadResponseSerializer, - status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal Error", - } - ) - - return responses diff --git a/src/agent/k8s-rest-agent/src/api/utils/db_functions.py b/src/agent/k8s-rest-agent/src/api/utils/db_functions.py deleted file mode 100644 index aa5da3f07..000000000 --- a/src/agent/k8s-rest-agent/src/api/utils/db_functions.py +++ /dev/null @@ -1,14 +0,0 @@ -import uuid -import shortuuid - - -def make_uuid(): - return str(uuid.uuid4()) - - -def make_uuid_hex(): - return uuid.uuid4().hex - - -def make_short_uuid(): - return shortuuid.ShortUUID().random(length=16) diff --git a/src/agent/k8s-rest-agent/src/api/utils/enums.py b/src/agent/k8s-rest-agent/src/api/utils/enums.py deleted file mode 100644 index cab64c903..000000000 --- a/src/agent/k8s-rest-agent/src/api/utils/enums.py +++ /dev/null @@ -1,102 +0,0 @@ -import inspect -from enum import Enum, unique, EnumMeta - -from django.conf import settings - -ROLE_PREFIX = getattr(settings, "ROLE_PREFIX", "tea_cloud") - - -class EnumWithDisplayMeta(EnumMeta): - def __new__(mcs, name, bases, attrs): - display_strings = attrs.get("DisplayStrings") - - if display_strings is not None and inspect.isclass(display_strings): - del 
attrs["DisplayStrings"] - if hasattr(attrs, "_member_names"): - attrs._member_names.remove("DisplayStrings") - - obj = super().__new__(mcs, name, bases, attrs) - for m in obj: - m.display_string = getattr(display_strings, m.name, None) - - return obj - - -class ExtraEnum(Enum): - @classmethod - def get_info(cls, title="", list_str=False): - str_info = """ - """ - str_info += title - if list_str: - for name, member in cls.__members__.items(): - str_info += """ - %s - """ % ( - name.lower().replace("_", "."), - ) - else: - for name, member in cls.__members__.items(): - str_info += """ - %s: %s - """ % ( - member.value, - name, - ) - return str_info - - @classmethod - def to_choices(cls, string_as_value=False): - if string_as_value: - choices = [ - (name.lower().replace("_", "."), name) - for name, member in cls.__members__.items() - ] - else: - choices = [ - (member.value, name) - for name, member in cls.__members__.items() - ] - - return choices - - @classmethod - def values(cls): - return list(map(lambda c: c.value, cls.__members__.values())) - - @classmethod - def names(cls): - return [name.lower() for name, _ in cls.__members__.items()] - - -@unique -class ErrorCode(Enum, metaclass=EnumWithDisplayMeta): - Unknown = 20000 - ResourceNotFound = 20001 - CustomError = 20002 - ResourceExisted = 20003 - ValidationError = 20004 - ParseError = 20005 - - class DisplayStrings: - Unknown = "未知错误" - ResourceNotFound = "资源未找到" - CustomError = "自定义错误" - ResourceExisted = "资源已经存在" - ValidationError = "参数验证错误" - ParseError = "解析错误" - - @classmethod - def get_info(cls): - error_code_str = """ - Error Codes: - """ - for name, member in cls.__members__.items(): - error_code_str += """ - %s: %s - """ % ( - member.value, - member.display_string, - ) - - return error_code_str diff --git a/src/agent/k8s-rest-agent/src/api/utils/exception_handler.py b/src/agent/k8s-rest-agent/src/api/utils/exception_handler.py deleted file mode 100644 index e0e1d7c94..000000000 --- 
a/src/agent/k8s-rest-agent/src/api/utils/exception_handler.py +++ /dev/null @@ -1,34 +0,0 @@ -from rest_framework import status -from rest_framework.exceptions import ErrorDetail -from rest_framework.exceptions import ValidationError, ParseError -from rest_framework.views import exception_handler - -from api.utils.enums import ErrorCode - - -def custom_exception_handler(exc, context): - # Call REST framework's default exception handler first, - # to get the standard error response. - response = exception_handler(exc, context) - - # Now add the HTTP status code to the response. - if response is not None: - if ( - response.status_code == status.HTTP_400_BAD_REQUEST - and "code" not in response.data - ): - if isinstance(exc, ValidationError): - response.data["code"] = ErrorCode.ValidationError.value - response.data["detail"] = ( - ErrorCode.ValidationError.display_string - ) - elif isinstance(exc, ParseError): - response.data["code"] = ErrorCode.ParseError.value - response.data["detail"] = ErrorCode.ParseError.display_string - elif isinstance(response.data.get("detail"), ErrorDetail): - response.data["code"] = response.data.get("detail").code - else: - response.data["code"] = ErrorCode.Unknown.value - response.data["detail"] = ErrorCode.Unknown.display_string - - return response diff --git a/src/agent/k8s-rest-agent/src/api/utils/fast_enum.py b/src/agent/k8s-rest-agent/src/api/utils/fast_enum.py deleted file mode 100644 index 28b685cf6..000000000 --- a/src/agent/k8s-rest-agent/src/api/utils/fast_enum.py +++ /dev/null @@ -1,283 +0,0 @@ -import re -from functools import partial -from typing import ( - Any, - Text, - Dict, - List, - Tuple, - Type, - Optional, - Callable, - Iterable, -) - - -def _resolve_init(bases: Tuple[Type]) -> Optional[Callable]: - for bcls in bases: - for rcls in bcls.mro(): - resolved_init = getattr(rcls, "__init__") - if resolved_init and resolved_init is not object.__init__: - return resolved_init - - -def _resolve_new(bases: Tuple[Type]) -> 
Optional[Tuple[Callable, Type]]: - for bcls in bases: - new = getattr(bcls, "__new__", None) - if new not in { - None, - None.__new__, - object.__new__, - FastEnum.__new__, - getattr(FastEnum, "_FastEnum__new"), - }: - return new, bcls - - -class FastEnum(type): - """ - A metaclass that handles enum-classes creation. - Possible options for classes using this metaclass: - - auto-generated values (see examples.py `MixedEnum` and `LightEnum`) - - subclassing possible until actual enum is not declared - (see examples.py `ExtEnumOne` and `ExtEnumTwo`) - - late init hooking (see examples.py `HookedEnum`) - - enum modifications protection (see examples.py comment after `ExtendedEnum`) - """ - - # pylint: disable=bad-mcs-classmethod-argument,protected-access,too-many-locals - # pylint: disable=too-many-branches - def __new__(mcs, name, bases, namespace: Dict[Text, Any]): - attributes: List[Text] = [ - k - for k in namespace.keys() - if (not k.startswith("_") and k.isupper()) - ] - attributes += [ - k - for k, v in namespace.get("__annotations__", {}).items() - if (not k.startswith("_") and k.isupper() and v == name) - ] - light_val = 0 + int(not bool(namespace.get("_ZERO_VALUED"))) - for attr in attributes: - if attr in namespace: - continue - else: - namespace[attr] = light_val - light_val += 1 - - __itemsize__ = 0 - for bcls in bases: - if bcls is type: - continue - __itemsize__ = max(__itemsize__, bcls.__itemsize__) - - if not __itemsize__: - __slots__ = set(namespace.get("__slots__", tuple())) | { - "name", - "value", - "_value_to_instance_map", - "_base_typed", - } - namespace["__slots__"] = tuple(__slots__) - namespace["__new__"] = FastEnum.__new - - if "__init__" not in namespace: - namespace["__init__"] = _resolve_init(bases) or mcs.__init - if "__annotations__" not in namespace: - __annotations__ = dict(name=Text, value=Any) - for k in attributes: - __annotations__[k] = name - namespace["__annotations__"] = __annotations__ - namespace["__dir__"] = partial( - 
FastEnum.__dir, bases=bases, namespace=namespace - ) - typ = type.__new__(mcs, name, bases, namespace) - if attributes: - typ._value_to_instance_map = {} - for instance_name in attributes: - val = namespace[instance_name] - if not isinstance(val, tuple): - val = (val,) - if val[0] in typ._value_to_instance_map: - inst = typ._value_to_instance_map[val[0]] - else: - inst = typ(*val, name=instance_name) - typ._value_to_instance_map[inst.value] = inst - setattr(typ, instance_name, inst) - - # noinspection PyUnresolvedReferences - typ.__call__ = typ.__new__ = typ.get - del typ.__init__ - typ.__hash__ = mcs.__hash - typ.__eq__ = mcs.__eq - typ.__copy__ = mcs.__copy - typ.__deepcopy__ = mcs.__deepcopy - typ.__reduce__ = mcs.__reduce - if "__str__" not in namespace: - typ.__str__ = mcs.__str - if "__repr__" not in namespace: - typ.__repr__ = mcs.__repr - - if f"_{name}__init_late" in namespace: - fun = namespace[f"_{name}__init_late"] - for instance in typ._value_to_instance_map.values(): - fun(instance) - delattr(typ, f"_{name}__init_late") - - typ.__setattr__ = typ.__delattr__ = mcs.__restrict_modification - typ._finalized = True - return typ - - @staticmethod - def __new(cls, *values, **_): - __new__ = _resolve_new(cls.__bases__) - if __new__: - __new__, typ = __new__ - obj = __new__(cls, *values) - obj._base_typed = typ - return obj - - return object.__new__(cls) - - @staticmethod - def __init(instance, value: Any, name: Text): - base_val_type = getattr(instance, "_base_typed", None) - if base_val_type: - value = base_val_type(value) - instance.value = value - instance.name = name - - # pylint: disable=missing-docstring - @staticmethod - def get(typ, val=None): - # noinspection PyProtectedMember - if not isinstance(typ._value_to_instance_map, dict): - for cls in typ.mro(): - if cls is typ: - continue - if hasattr(cls, "_value_to_instance_map") and isinstance( - cls._value_to_instance_map, dict - ): - return cls._value_to_instance_map[val] - raise ValueError( - f"Value 
{val} is not found in this enum type declaration" - ) - # noinspection PyProtectedMember - member = typ._value_to_instance_map.get(val) - if member is None: - raise ValueError( - f"Value {val} is not found in this enum type declaration" - ) - return member - - @staticmethod - def __eq(val, other): - return isinstance(val, type(other)) and ( - val is other if type(other) is type(val) else val.value == other - ) - - def __hash(cls): - # noinspection PyUnresolvedReferences - return hash(cls.value) - - @staticmethod - def __restrict_modification(*a, **k): - raise TypeError( - f"Enum-like classes strictly prohibit changing any attribute/property" - f" after they are once set" - ) - - def __iter__(cls): - return iter(cls._value_to_instance_map.values()) - - def __setattr__(cls, key, value): - if hasattr(cls, "_finalized"): - cls.__restrict_modification() - super().__setattr__(key, value) - - def __delattr__(cls, item): - if hasattr(cls, "_finalized"): - cls.__restrict_modification() - super().__delattr__(item) - - def __getitem__(cls, item): - return getattr(cls, item) - - def has_value(cls, value): - return value in cls._value_to_instance_map - - def to_choices(cls): - return [(key, key) for key in cls._value_to_instance_map.keys()] - - def values(cls): - return cls._value_to_instance_map.keys() - - def key_description_list(cls): - result = [] - for key in cls._value_to_instance_map.keys(): - enum_key = "_".join( - re.sub( - "([A-Z][a-z]+)", r" \1", re.sub("([A-Z]+)", r" \1", key) - ).split() - ).upper() - result.append((key, cls[enum_key].description)) - return result - - # pylint: disable=unused-argument - # noinspection PyUnusedLocal,SpellCheckingInspection - def __deepcopy(cls, memodict=None): - return cls - - def __copy(cls): - return cls - - def __reduce(cls): - typ = type(cls) - # noinspection PyUnresolvedReferences - return typ.get, (typ, cls.value) - - @staticmethod - def __str(clz): - return f"{clz.__class__.__name__}.{clz.name}" - - @staticmethod - def 
__repr(clz): - return f"<{clz.__class__.__name__}.{clz.name}: {repr(clz.value)}>" - - def __dir__(self) -> Iterable[str]: - return [ - k - for k in super().__dir__() - if k not in ("_finalized", "_value_to_instance_map") - ] - - # def __choices__(self) -> Iterable[str]: - # return [()] - - @staticmethod - def __dir(bases, namespace, *_, **__): - keys = [ - k - for k in namespace.keys() - if k in ("__annotations__", "__module__", "__qualname__") - or not k.startswith("_") - ] - for bcls in bases: - keys.extend(dir(bcls)) - return list(set(keys)) - - -class KeyDescriptionEnum(metaclass=FastEnum): - description: Text - __slots__ = ("description",) - - def __init__(self, value, description, name): - # noinspection PyDunderSlots,PyUnresolvedReferences - self.value = value - # noinspection PyDunderSlots,PyUnresolvedReferences - self.name = name - self.description = description - - def describe(self): - return self.description diff --git a/src/agent/k8s-rest-agent/src/api/utils/jwt.py b/src/agent/k8s-rest-agent/src/api/utils/jwt.py deleted file mode 100644 index 54b7e0741..000000000 --- a/src/agent/k8s-rest-agent/src/api/utils/jwt.py +++ /dev/null @@ -1,30 +0,0 @@ -import logging - -from django.contrib.auth import get_user_model -from rest_framework import serializers - -User = get_user_model() -LOG = logging.getLogger(__name__) - - -class UserSerializer(serializers.ModelSerializer): - id = serializers.CharField(source="username") - - class Meta: - model = User - fields = ("id",) - extra_kwargs = {"id": {"validators": []}} - - -def jwt_response_payload_handler(token, user=None, request=None): - return { - "token": token, - "user": UserSerializer(user, context={"request": request}).data, - } - - -def jwt_get_username_from_payload_handler(payload): - """ - Override this function if username is formatted differently in payload - """ - return payload.get("sub") diff --git a/src/agent/k8s-rest-agent/src/api/utils/mixins.py b/src/agent/k8s-rest-agent/src/api/utils/mixins.py 
deleted file mode 100644 index 41becc97f..000000000 --- a/src/agent/k8s-rest-agent/src/api/utils/mixins.py +++ /dev/null @@ -1,12 +0,0 @@ -class PermissionsPerMethodMixin(object): - def get_permissions(self): - """ - Allows overriding default permissions with @permission_classes - """ - view = getattr(self, self.action) - if hasattr(view, "permission_classes"): - return [ - permission_class() - for permission_class in view.permission_classes - ] - return super().get_permissions() diff --git a/src/agent/k8s-rest-agent/src/api/utils/serializers.py b/src/agent/k8s-rest-agent/src/api/utils/serializers.py deleted file mode 100644 index 832e15231..000000000 --- a/src/agent/k8s-rest-agent/src/api/utils/serializers.py +++ /dev/null @@ -1,33 +0,0 @@ -import textwrap - -from rest_framework import serializers -from api.utils.enums import ErrorCode - - -class PaginationSerializer(serializers.Serializer): - page = serializers.IntegerField( - default=1, min_value=1, help_text="查询第几页" - ) - per_page = serializers.IntegerField( - default=10, - min_value=-1, - help_text="查询分页的每页数量, 如果为-1则不限制分页数量", - ) - limit = serializers.IntegerField( - min_value=1, help_text="限制最大数量", required=False - ) - - -class PaginationResultSerializer(serializers.Serializer): - total = serializers.IntegerField( - min_value=0, help_text="Total Number of result" - ) - - -class BadResponseSerializer(serializers.Serializer): - code = serializers.IntegerField( - help_text=textwrap.dedent(ErrorCode.get_info()) - ) - detail = serializers.CharField( - required=False, help_text="Error Messages", allow_blank=True - ) diff --git a/src/agent/k8s-rest-agent/src/api/views.py b/src/agent/k8s-rest-agent/src/api/views.py deleted file mode 100644 index 91ea44a21..000000000 --- a/src/agent/k8s-rest-agent/src/api/views.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.shortcuts import render - -# Create your views here. 
diff --git a/src/agent/k8s-rest-agent/src/manage.py b/src/agent/k8s-rest-agent/src/manage.py deleted file mode 100755 index 4546cf051..000000000 --- a/src/agent/k8s-rest-agent/src/manage.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python -"""Django's command-line utility for administrative tasks.""" -import os -import sys - - -def main(): - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "server.settings") - try: - from django.core.management import execute_from_command_line - except ImportError as exc: - raise ImportError( - "Couldn't import Django. Are you sure it's installed and " - "available on your PYTHONPATH environment variable? Did you " - "forget to activate a virtual environment?" - ) from exc - execute_from_command_line(sys.argv) - - -if __name__ == "__main__": - main() diff --git a/src/agent/k8s-rest-agent/src/server/__init__.py b/src/agent/k8s-rest-agent/src/server/__init__.py deleted file mode 100644 index 0165ba0dd..000000000 --- a/src/agent/k8s-rest-agent/src/server/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from __future__ import absolute_import, unicode_literals - -# This will make sure the app is always imported when -# Django starts so that shared_task will use this app. -from .celery import app as celery_app - -__all__ = ("celery_app",) diff --git a/src/agent/k8s-rest-agent/src/server/asgi.py b/src/agent/k8s-rest-agent/src/server/asgi.py deleted file mode 100644 index 9fadff8ce..000000000 --- a/src/agent/k8s-rest-agent/src/server/asgi.py +++ /dev/null @@ -1,16 +0,0 @@ -""" -ASGI config for server project. - -It exposes the ASGI callable as a module-level variable named ``application``. 
- -For more information on this file, see -https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/ -""" - -import os - -from django.core.asgi import get_asgi_application - -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "server.settings") - -application = get_asgi_application() diff --git a/src/agent/k8s-rest-agent/src/server/celery.py b/src/agent/k8s-rest-agent/src/server/celery.py deleted file mode 100644 index 2393692e3..000000000 --- a/src/agent/k8s-rest-agent/src/server/celery.py +++ /dev/null @@ -1,15 +0,0 @@ -from __future__ import absolute_import, unicode_literals -import os -from celery import Celery - -# set the default Django settings module for the 'celery' program. -from django.conf import settings - -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "server.settings") - -app = Celery("server") - -app.config_from_object(settings, namespace="CELERY") - -# Load task modules from all registered Django app configs. -app.autodiscover_tasks() diff --git a/src/agent/k8s-rest-agent/src/server/settings.py b/src/agent/k8s-rest-agent/src/server/settings.py deleted file mode 100644 index fe064797b..000000000 --- a/src/agent/k8s-rest-agent/src/server/settings.py +++ /dev/null @@ -1,224 +0,0 @@ -""" -Django settings for server project. - -Generated by 'django-admin startproject' using Django 3.0.7. 
- -For more information on this file, see -https://docs.djangoproject.com/en/3.0/topics/settings/ - -For the full list of settings and their values, see -https://docs.djangoproject.com/en/3.0/ref/settings/ -""" - -import os -from django.utils.translation import gettext_lazy as _ - -WEBROOT = os.getenv("WEBROOT", "/") -WEBROOT = WEBROOT if WEBROOT != "/" else "" -DB_HOST = os.getenv("DB_HOST", "") -DB_PORT = int(os.getenv("DB_PORT", "5432")) -DB_NAME = os.getenv("DB_NAME", "") -DB_USER = os.getenv("DB_USER", "") -DB_PASSWORD = os.getenv("DB_PASSWORD", "") -DEBUG = os.getenv("DEBUG", "False") -DEBUG = DEBUG == "True" -# Build paths inside the project like this: os.path.join(BASE_DIR, ...) -BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -KEYCLOAK_PUBLIC_KEY = os.getenv("KEYCLOAK_PUBLIC_KEY", "") - - -# Quick-start development settings - unsuitable for production -# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/ - -# SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = "xdpfxz9)__^3azxs2(59$j&chmo#6&gi*pu3#wpt^$m!vff)0w" - -# SECURITY WARNING: don't run with debug turned on in production! 
-# DEBUG = True - -ALLOWED_HOSTS = ["*"] - - -# Application definition - -INSTALLED_APPS = [ - "jet", - "django.contrib.admin", - "django.contrib.auth", - "django.contrib.contenttypes", - "django.contrib.sessions", - "django.contrib.messages", - "django.contrib.staticfiles", - "rest_framework", - "server", - "api", - "drf_yasg", - "django_celery_beat", - "django_celery_results", -] - -MIDDLEWARE = [ - "django.middleware.security.SecurityMiddleware", - "django.contrib.sessions.middleware.SessionMiddleware", - "django.middleware.common.CommonMiddleware", - "django.middleware.csrf.CsrfViewMiddleware", - "django.contrib.auth.middleware.AuthenticationMiddleware", - "django.contrib.messages.middleware.MessageMiddleware", - "django.middleware.clickjacking.XFrameOptionsMiddleware", -] - -ROOT_URLCONF = "server.urls" - -TEMPLATES = [ - { - "BACKEND": "django.template.backends.django.DjangoTemplates", - "DIRS": [], - "APP_DIRS": True, - "OPTIONS": { - "context_processors": [ - "django.template.context_processors.debug", - "django.template.context_processors.request", - "django.contrib.auth.context_processors.auth", - "django.contrib.messages.context_processors.messages", - ], - }, - }, -] - -WSGI_APPLICATION = "server.wsgi.application" - - -# Password validation -# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators - -AUTH_PASSWORD_VALIDATORS = [ - { - "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", - }, - { - "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", - }, - { - "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", - }, - { - "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", - }, -] - - -# Internationalization -# https://docs.djangoproject.com/en/3.0/topics/i18n/ - -LANGUAGE_CODE = "zh-hans" -USE_I18N = True -USE_L10N = True - -LANGUAGES = [ - ("en", _("English")), - ("zh-hans", _("Simplified Chinese")), -] - -TIME_ZONE = 
"Asia/Shanghai" -USE_TZ = True - - -# Static files (CSS, JavaScript, Images) -# https://docs.djangoproject.com/en/3.0/howto/static-files/ - -STATIC_URL = WEBROOT + "/static/" -STATIC_ROOT = "/var/www/static" -MEDIA_ROOT = "/data/media" -MEDIA_URL = WEBROOT + "/media/" - -USE_X_FORWARDED_HOST = True -FORCE_SCRIPT_NAME = WEBROOT if WEBROOT != "" else "/" - -DATABASES = { - "default": { - "ENGINE": "django.db.backends.postgresql", - "NAME": DB_NAME, - "USER": DB_USER, - "PASSWORD": DB_PASSWORD, - "HOST": DB_HOST, - "PORT": DB_PORT, - } -} - -REST_FRAMEWORK = { - "DEFAULT_VERSIONING_CLASS": "rest_framework.versioning.AcceptHeaderVersioning", - "DEFAULT_METADATA_CLASS": "rest_framework.metadata.SimpleMetadata", - "DEFAULT_PARSER_CLASSES": [ - "rest_framework.parsers.FormParser", - "rest_framework.parsers.MultiPartParser", - "rest_framework.parsers.JSONParser", - ], - "EXCEPTION_HANDLER": "api.utils.exception_handler.custom_exception_handler", -} - -SWAGGER_SETTINGS = { - "VALIDATOR_URL": None, - "DEFAULT_INFO": "server.urls.swagger_info", - "SECURITY_DEFINITIONS": { - "Bearer": {"type": "apiKey", "name": "Authorization", "in": "header"} - }, - "USE_SESSION_AUTH": False, -} - -CELERY_BROKER_URL = os.getenv("CELERY_BROKER_URL", "") -CELERY_RESULT_BACKEND = "django-db" -CELERY_BEAT_SCHEDULER = "django_celery_beat.schedulers:DatabaseScheduler" - -JWT_AUTH = { - "JWT_AUTH_HEADER_PREFIX": "Bearer", - "JWT_PUBLIC_KEY": """-----BEGIN PUBLIC KEY----- -%s ------END PUBLIC KEY-----""" - % KEYCLOAK_PUBLIC_KEY, - "JWT_ALGORITHM": "RS256", - "JWT_AUDIENCE": "account", - "JWT_PAYLOAD_GET_USERNAME_HANDLER": "api.utils.jwt.jwt_get_username_from_payload_handler", - "JWT_RESPONSE_PAYLOAD_HANDLER": "api.utils.jwt.jwt_response_payload_handler", -} - -AUTH_USER_MODEL = "api.User" -AUTH_PROFILE_MODULE = "api.Profile" - -LOGGING = { - "version": 1, - "disable_existing_loggers": False, - "formatters": { - "verbose": { - "format": "%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d 
%(message)s" - }, - "simple": {"format": "%(levelname)s %(message)s"}, - }, - "handlers": { - "null": { - "level": "DEBUG", - "class": "logging.NullHandler", - }, - "console": { - "level": "DEBUG", - "class": "logging.StreamHandler", - "formatter": "simple", - }, - }, - "loggers": { - "django": { - "handlers": ["null"], - "propagate": True, - "level": "INFO", - }, - "django.request": { - "handlers": ["console"], - "level": "DEBUG", - "propagate": False, - }, - "api": { - "handlers": ["console"], - "level": "DEBUG", - "propagate": False, - }, - }, -} diff --git a/src/agent/k8s-rest-agent/src/server/urls.py b/src/agent/k8s-rest-agent/src/server/urls.py deleted file mode 100644 index e7d0a7139..000000000 --- a/src/agent/k8s-rest-agent/src/server/urls.py +++ /dev/null @@ -1,70 +0,0 @@ -"""server URL Configuration - -The `urlpatterns` list routes URLs to views. For more information please see: - https://docs.djangoproject.com/en/3.0/topics/http/urls/ -Examples: -Function views - 1. Add an import: from my_app import views - 2. Add a URL to urlpatterns: path('', views.home, name='home') -Class-based views - 1. Add an import: from other_app.views import Home - 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') -Including another URLconf - 1. Import the include() function: from django.urls import include, path - 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls')) -""" - -import os - -from api.routes.hello.views import HelloViewSet -from django.conf import settings -from django.contrib import admin -from django.urls import path, include -from drf_yasg import openapi -from drf_yasg.views import get_schema_view -from rest_framework import permissions -from rest_framework.routers import DefaultRouter - -DEBUG = getattr(settings, "DEBUG", False) -VERSION = os.getenv("API_VERSION", "v1") - -router = DefaultRouter(trailing_slash=False) -router.register("hello", HelloViewSet, basename="hello") - -router.include_root_view = False - -urlpatterns = router.urls - -swagger_info = openapi.Info( - title="Django Example API", - default_version=VERSION, - description=""" - Django Example API - """, -) - -SchemaView = get_schema_view( - info=swagger_info, - validators=["flex"], - public=True, - permission_classes=(permissions.AllowAny,), -) - -urlpatterns += [ - path("admin/", admin.site.urls), - path("jet/", include("jet.urls", "jet")), -] - -if DEBUG: - urlpatterns += [ - path( - "docs/", - SchemaView.with_ui("swagger", cache_timeout=0), - name="docs", - ), - path( - "redoc/", - SchemaView.with_ui("redoc", cache_timeout=0), - name="redoc", - ), - ] diff --git a/src/agent/k8s-rest-agent/src/server/wsgi.py b/src/agent/k8s-rest-agent/src/server/wsgi.py deleted file mode 100644 index 11efb9c4d..000000000 --- a/src/agent/k8s-rest-agent/src/server/wsgi.py +++ /dev/null @@ -1,16 +0,0 @@ -""" -WSGI config for server project. - -It exposes the WSGI callable as a module-level variable named ``application``. 
- -For more information on this file, see -https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/ -""" - -import os - -from django.core.wsgi import get_wsgi_application - -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "server.settings") - -application = get_wsgi_application() diff --git a/src/agent/k8s-rest-agent/supervisor/conf.d/server.conf b/src/agent/k8s-rest-agent/supervisor/conf.d/server.conf deleted file mode 100644 index f4f1ad384..000000000 --- a/src/agent/k8s-rest-agent/supervisor/conf.d/server.conf +++ /dev/null @@ -1,8 +0,0 @@ -[program:beat_task] -environment=C_FORCE_ROOT="yes" -command=celery -A server beat -l info -directory=/var/www/server/ -autostart=true -autorestart=true -stdout_logfile=/var/log/supervisor/server.log -redirect_stderr=true \ No newline at end of file diff --git a/src/agent/k8s-rest-agent/supervisor/supervisord.conf b/src/agent/k8s-rest-agent/supervisor/supervisord.conf deleted file mode 100644 index d6bf70c31..000000000 --- a/src/agent/k8s-rest-agent/supervisor/supervisord.conf +++ /dev/null @@ -1,28 +0,0 @@ -; supervisor config file - -[unix_http_server] -file=/var/run/supervisor.sock ; (the path to the socket file) -chmod=0700 ; sockef file mode (default 0700) - -[supervisord] -logfile=/var/log/supervisor/supervisord.log ; (main log file;default $CWD/supervisord.log) -pidfile=/var/run/supervisord.pid ; (supervisord pidfile;default supervisord.pid) -childlogdir=/var/log/supervisor ; ('AUTO' child log dir, default $TEMP) - -; the below section must remain in the config file for RPC -; (supervisorctl/web interface) to work, additional interfaces may be -; added by defining them in separate rpcinterface: sections -[rpcinterface:supervisor] -supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface - -[supervisorctl] -serverurl=unix:///var/run/supervisor.sock ; use a unix:// URL for a unix socket - -; The [include] section can just contain the "files" setting. 
This -; setting can list multiple files (separated by whitespace or -; newlines). It can also contain wildcards. The filenames are -; interpreted as relative to this file. Included files *cannot* -; include files themselves. - -[include] -files = /etc/supervisor/conf.d/*.conf \ No newline at end of file diff --git a/src/agent/k8s-rest-agent/uwsgi/server.ini b/src/agent/k8s-rest-agent/uwsgi/server.ini deleted file mode 100644 index 72607ef42..000000000 --- a/src/agent/k8s-rest-agent/uwsgi/server.ini +++ /dev/null @@ -1,71 +0,0 @@ -[uwsgi] -module = $(UWSGI_MODULE) -processes = $(UWSGI_PROCESSES) -threads = $(UWSGI_THREADS) -procname-prefix-spaced = uwsgi: $(UWSGI_MODULE) - -http-socket = :80 -http-enable-proxy-protocol = 1 -http-auto-chunked = true -http-keepalive = 75 -http-timeout = 75 -stats = :1717 -stats-http = 1 -offload-threads = $(UWSGI_OFFLOAD_THREADS) - -# Better startup/shutdown in docker: -die-on-term = 1 -lazy-apps = 0 - -vacuum = 1 -master = 1 -enable-threads = true -thunder-lock = 1 -buffer-size = 65535 - -# Logging -log-x-forwarded-for = true -#memory-report = true -#disable-logging = true -#log-slow = 200 -#log-date = true - -# Avoid errors on aborted client connections -ignore-sigpipe = true -ignore-write-errors = true -disable-write-exception = true - -#listen=1000 -#max-fd=120000 -no-defer-accept = 1 - -# Limits, Kill requests after 120 seconds -harakiri = 120 -harakiri-verbose = true -post-buffering = 4096 - -# Custom headers -add-header = X-Content-Type-Options: nosniff -add-header = X-XSS-Protection: 1; mode=block -add-header = Strict-Transport-Security: max-age=16070400 -add-header = Connection: Keep-Alive - -# Static file serving with caching headers and gzip -static-map = /static=/var/www/static -static-map = /media=/data/media -static-safe = /usr/local/lib/python3.7/site-packages/ -static-safe = /var/www/static/ -static-gzip-dir = /var/www/static/ -static-expires = /var/www/static/CACHE/* 2592000 -static-expires = /data/media/cache/* 
2592000 -static-expires = /var/www/static/frontend/img/* 2592000 -static-expires = /var/www/static/frontend/fonts/* 2592000 -static-expires = /var/www/* 3600 -route-uri = ^/static/ addheader:Vary: Accept-Encoding -error-route-uri = ^/static/ addheader:Cache-Control: no-cache - -# Cache stat() calls -cache2 = name=statcalls,items=30 -static-cache-paths = 86400 - -touch-reload = /tmp/server.txt \ No newline at end of file diff --git a/src/agent/kubernetes-agent/Dockerfile b/src/agent/kubernetes-agent/Dockerfile deleted file mode 100644 index 911b5fcd4..000000000 --- a/src/agent/kubernetes-agent/Dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -FROM python:3.6 - -COPY requirements.txt / -RUN pip install -r /requirements.txt -RUN curl -LO https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl && chmod +x kubectl && \ - mv kubectl /usr/local/bin/kubectl - -COPY src /app - -WORKDIR /app - -ENV KUBECONFIG /app/.kube/config -ENV PYTHONPATH /app:$PATHONPATH - -CMD python main.py diff --git a/src/agent/kubernetes-agent/requirements.txt b/src/agent/kubernetes-agent/requirements.txt deleted file mode 100644 index e98ba28a9..000000000 --- a/src/agent/kubernetes-agent/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -kubernetes<=9.0.0 -requests<=2.32.3 diff --git a/src/agent/kubernetes-agent/src/__init__.py b/src/agent/kubernetes-agent/src/__init__.py deleted file mode 100644 index 0480730a5..000000000 --- a/src/agent/kubernetes-agent/src/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# diff --git a/src/agent/kubernetes-agent/src/main.py b/src/agent/kubernetes-agent/src/main.py deleted file mode 100644 index 4efbce25f..000000000 --- a/src/agent/kubernetes-agent/src/main.py +++ /dev/null @@ -1,13 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from operations import create_node, delete_node, fabric_ca_register -from utils.env import 
OPERATION, AgentOperation - -if __name__ == "__main__": - if OPERATION == AgentOperation.Create.value: - create_node() - elif OPERATION == AgentOperation.Delete.value: - delete_node() - elif OPERATION == AgentOperation.FabricCARegister.value: - fabric_ca_register() diff --git a/src/agent/kubernetes-agent/src/network/__init__.py b/src/agent/kubernetes-agent/src/network/__init__.py deleted file mode 100644 index a5270fa8c..000000000 --- a/src/agent/kubernetes-agent/src/network/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from .fabric import FabricNetwork diff --git a/src/agent/kubernetes-agent/src/network/fabric/__init__.py b/src/agent/kubernetes-agent/src/network/fabric/__init__.py deleted file mode 100644 index a5d51955e..000000000 --- a/src/agent/kubernetes-agent/src/network/fabric/__init__.py +++ /dev/null @@ -1,283 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import os -import logging -from utils.env import ( - PEER_CONFIG, - CA_CONFIG, - CA_ADMIN_NAME, - CA_ADMIN_PASSWORD, - AGENT_IP, - FabricNodeType, - FabricImages, -) - -LOG = logging.getLogger(__name__) -CA_HOSTS = CA_CONFIG.get("hosts", "").split(",") -# Set fabric ca hosts from agent ip and user customize hosts. 
-CA_HOSTS.append(AGENT_IP) - - -class FabricNetwork(object): - def __init__(self, *args, **kwargs): - self._version = kwargs.get("version") - self._type = kwargs.get("node_type") - self._agent_id = kwargs.get("agent_id") - self._node_id = kwargs.get("node_id") - self._deploy_name = "deploy-%s" % str(self._node_id) - self._service_name = "service-%s" % str(self._node_id) - self._ingress_name = "ingress-%s" % str(self._node_id) - self._container_image = "" - self._container_environments = None - self._container_command = None - self._container_command_args = None - self._initial_containers = None - self._container_volume_mounts = None - self._containers = None - self._initial_containers = None - self._volumes = None - - if self._type == FabricNodeType.Ca.value: - self._container_ports = [7054] - self._service_ports = [{"port": 7054, "name": "server"}] - self._image_name = "%s:%s" % (FabricImages.Ca.value, self._version) - self._pod_name = "ca-server" - self._init_ca_deployment() - elif self._type == FabricNodeType.Peer.value: - self._container_ports = [7051, 7052] - self._service_ports = [ - {"port": 7051, "name": "server"}, - {"port": 7052, "name": "grpc"}, - ] - self._image_name = "%s:%s" % ( - FabricImages.Peer.value, - self._version, - ) - self._pod_name = "peer" - self._init_peer_deployment() - else: - self._container_ports = [] - self._service_ports = [] - self._image_name = "" - self._pod_name = "" - - def _init_ca_deployment(self): - self._container_environments = [ - { - "name": "FABRIC_CA_HOME", - "value": "/etc/hyperledger/fabric-ca-server", - }, - { - "name": "FABRIC_CA_SERVER_HOME", - "value": "/etc/hyperledger/fabric-ca-server/crypto", - }, - {"name": "FABRIC_CA_SERVER_TLS_ENABLED", "value": "true"}, - { - "name": "FABRIC_CA_SERVER_CSR_HOSTS", - "value": ",".join(CA_HOSTS), - }, - ] - self._container_command = ["fabric-ca-server"] - self._container_command_args = [ - "start", - "-b", - "%s:%s" % (CA_ADMIN_NAME, CA_ADMIN_PASSWORD), - "-d", - ] - - def 
_init_peer_deployment(self): - gossip_use_leader_reflection = PEER_CONFIG.get( - "gossip_use_leader_reflection", True - ) - gossip_org_leader = PEER_CONFIG.get("gossip_org_leader", False) - gossip_skip_handshake = PEER_CONFIG.get("gossip_skip_handshake", True) - name = PEER_CONFIG.get("name") - local_msp_id = PEER_CONFIG.get("local_msp_id") - ca_list = PEER_CONFIG.get("ca_list", []) - - initial_container_work_dir = "/work-dir" - # TODO: find a policy for peer directory definition - initial_container_environments = [ - { - "name": "FABRIC_CA_CLIENT_HOME", - "value": "%s/hyperledger/org1/peer1" - % initial_container_work_dir, - }, - {"name": "PEER_NAME", "value": name}, - ] - for ca_node in ca_list: - ca_address = ca_node.get("address") - ca_certificate_url = ca_node.get("certificate") - ca_certificate_file_name = ca_certificate_url.split("/")[-1] - ca_certificate_file_type = ( - "archive" - if ca_certificate_file_name.endswith((".tgz", "tar.gz")) - else "file" - ) - ca_type = ca_node.get("type").upper() - users = ca_node.get("users", []) - ca_environments = [ - {"name": "%s_CA_ADDRESS" % ca_type, "value": ca_address}, - { - "name": "%s_CA_CERTIFICATE_URL" % ca_type, - "value": ca_certificate_url, - }, - { - "name": "%s_CA_CERTIFICATE_FILE_NAME" % ca_type, - "value": ca_certificate_file_name, - }, - { - "name": "%s_CA_CERTIFICATE_FILE_TYPE" % ca_type, - "value": ca_certificate_file_type, - }, - ] - for user in users: - user_type = user.get("type").upper() - username = user.get("username") - password = user.get("password") - ca_environments += [ - { - "name": "%s_%s_USER_NAME" % (ca_type, user_type), - "value": username, - }, - { - "name": "%s_%s_USER_PASSWORD" % (ca_type, user_type), - "value": password, - }, - ] - initial_container_environments = ( - initial_container_environments + ca_environments - ) - - initial_container_command = ["bash", "-c"] - script_file_path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "peer_initial_ca.sh" - ) - # read 
script for peer initial - with open(script_file_path, "r") as initial_ca_script: - shell_script = initial_ca_script.read() - initial_container_command_args = [shell_script] - ca_image = "%s:%s" % (FabricImages.Ca.value, self._version) - - self._initial_containers = [ - { - "image": ca_image, - "environments": initial_container_environments, - "name": "initial-ca", - "command": initial_container_command, - "command_args": initial_container_command_args, - "volume_mounts": [{"name": "workdir", "path": "/work-dir"}], - } - ] - self._volumes = [ - {"name": "workdir", "empty_dir": {}}, - {"name": "docker-run", "host_path": "/var/run"}, - ] - self._container_environments = [ - {"name": "CORE_PEER_ID", "value": name}, - {"name": "CORE_PEER_LOCALMSPID", "value": local_msp_id}, - { - "name": "CORE_PEER_MSPCONFIGPATH", - "value": "/work-dir/hyperledger/org1/peer1/msp", - }, - { - "name": "CORE_VM_ENDPOINT", - "value": "unix:///host/var/run/docker.sock", - }, - { - "name": "CORE_VM_DOCKER_HOSTCONFIG_NETWORKMODE", - "value": "guide_fabric-ca", - }, - {"name": "FABRIC_LOGGING_SPEC", "value": "debug"}, - {"name": "CORE_PEER_TLS_ENABLED", "value": "true"}, - { - "name": "CORE_PEER_TLS_CERT_FILE", - "value": "/work-dir/hyperledger/org1/peer1/tls-msp/signcerts/cert.pem", - }, - { - "name": "CORE_PEER_TLS_KEY_FILE", - "value": "/work-dir/hyperledger/org1/peer1/tls-msp/keystore/key.pem", - }, - { - "name": "CORE_PEER_GOSSIP_USELEADERELECTION", - "value": "true" if gossip_use_leader_reflection else "false", - }, - { - "name": "CORE_PEER_GOSSIP_ORGLEADER", - "value": "true" if gossip_org_leader else "false", - }, - { - "name": "CORE_PEER_GOSSIP_SKIPHANDSHAKE", - "value": "true" if gossip_skip_handshake else "false", - }, - { - "name": "CORE_PEER_TLS_ROOTCERT_FILE", - "value": "/work-dir/hyperledger/org1/peer1/tls-msp/tlscacerts/tls.pem", - }, - ] - self._container_volume_mounts = [ - {"name": "workdir", "path": "/work-dir"}, - {"name": "docker-run", "path": "/host/var/run"}, - ] - - # 
def _generate_ingress(self): - # ingress_name = "ingress-%s" % str(self._node_id) - # annotations = {"nginx.ingress.kubernetes.io/ssl-redirect": "false"} - # if self._type == FabricNodeType.Ca.name.lower(): - # ingress_paths = [ - # {"port": 7054, "path": "/%s" % str(self._node_id)} - # ] - # else: - # ingress_paths = [] - # - # return { - # "name": ingress_name, - # "service_name": self._service_name, - # "ingress_paths": ingress_paths, - # "annotations": annotations, - # } - - def add_environments(self, environments=None): - if environments is None: - environments = [] - - self._container_environments += environments - - def deployment(self): - deployment = {"name": self._deploy_name} - if self._volumes is not None: - deployment.update({"volumes": self._volumes}) - if self._initial_containers is not None: - deployment.update({"initial_containers": self._initial_containers}) - container_dict = { - "image": self._image_name, - "name": self._pod_name, - "ports": self._container_ports, - } - if self._container_environments is not None: - container_dict.update( - {"environments": self._container_environments} - ) - if self._container_volume_mounts is not None: - container_dict.update( - {"volume_mounts": self._container_volume_mounts} - ) - if self._container_command is not None: - container_dict.update({"command": self._container_command}) - if self._container_command_args is not None: - container_dict.update( - {"command_args": self._container_command_args} - ) - containers = [container_dict] - deployment.update({"containers": containers}) - - return deployment - - def service(self): - return { - "name": self._service_name, - "ports": self._service_ports, - "selector": {"app": self._deploy_name}, - "service_type": "NodePort", - } diff --git a/src/agent/kubernetes-agent/src/network/fabric/peer_initial_ca.sh b/src/agent/kubernetes-agent/src/network/fabric/peer_initial_ca.sh deleted file mode 100644 index 314760420..000000000 --- 
a/src/agent/kubernetes-agent/src/network/fabric/peer_initial_ca.sh +++ /dev/null @@ -1,22 +0,0 @@ -cd /work-dir; -mkdir tls; -wget -c $TLS_CA_CERTIFICATE_URL; -if [ "$TLS_CA_CERTIFICATE_FILE_TYPE" = "archive" ]; then tar -zxf $TLS_CA_CERTIFICATE_FILE_NAME -C tls --strip-components 1; else cp $TLS_CA_CERTIFICATE_FILE_NAME tls/ca-cert.pem; fi; -rm -rf $$TLS_CA_CERTIFICATE_FILE_NAME; -mkdir signature; -wget -c $SIGNATURE_CA_CERTIFICATE_URL; -if [ "$SIGNATURE_CA_CERTIFICATE_FILE_TYPE" = "archive" ]; then tar -zxf $SIGNATURE_CA_CERTIFICATE_FILE_NAME -C signature --strip-components 1; else cp $SIGNATURE_CA_CERTIFICATE_FILE_NAME signature/ca-cert.pem; fi; -rm -rf $SIGNATURE_CA_CERTIFICATE_FILE_NAME; -export FABRIC_CA_CLIENT_TLS_CERTFILES=/work-dir/signature/ca-cert.pem; -fabric-ca-client enroll -d -u https://$SIGNATURE_PEER_USER_NAME:$SIGNATURE_PEER_USER_PASSWORD@$SIGNATURE_CA_ADDRESS; -export FABRIC_CA_CLIENT_MSPDIR=tls-msp; -export FABRIC_CA_CLIENT_TLS_CERTFILES=/work-dir/tls/ca-cert.pem; -fabric-ca-client enroll -d -u https://$TLS_PEER_USER_NAME:$TLS_PEER_USER_PASSWORD@$TLS_CA_ADDRESS --enrollment.profile tls --csr.hosts $PEER_NAME; -cd /work-dir/hyperledger/org1/peer1/tls-msp/keystore && mv * key.pem; -cd /work-dir/hyperledger/org1/peer1/tls-msp/tlscacerts && mv * tls.pem; -export FABRIC_CA_CLIENT_HOME=/work-dir/hyperledger/org1/admin; -export FABRIC_CA_CLIENT_TLS_CERTFILES=/work-dir/signature/ca-cert.pem; -export FABRIC_CA_CLIENT_MSPDIR=msp; -fabric-ca-client enroll -d -u https://$SIGNATURE_USER_USER_NAME:$SIGNATURE_USER_USER_PASSWORD@$SIGNATURE_CA_ADDRESS; -mkdir -p /work-dir/hyperledger/org1/peer1/msp/admincerts; -cp /work-dir/hyperledger/org1/admin/msp/signcerts/cert.pem /work-dir/hyperledger/org1/peer1/msp/admincerts/org1-admin-cert.pem; diff --git a/src/agent/kubernetes-agent/src/operations/__init__.py b/src/agent/kubernetes-agent/src/operations/__init__.py deleted file mode 100644 index b59d71e00..000000000 --- 
a/src/agent/kubernetes-agent/src/operations/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from .create_node import create_node -from .delete_node import delete_node -from .fabric_ca_register import fabric_ca_register diff --git a/src/agent/kubernetes-agent/src/operations/create_node.py b/src/agent/kubernetes-agent/src/operations/create_node.py deleted file mode 100644 index 4cfd41928..000000000 --- a/src/agent/kubernetes-agent/src/operations/create_node.py +++ /dev/null @@ -1,161 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import json -import subprocess -import tarfile -from time import sleep - -import requests - -from network import FabricNetwork -from utils.env import ( - NODE_ID, - NODE_DETAIL_URL, - AGENT_ID, - AGENT_IP, - NETWORK_VERSION, - NODE_TYPE, - MAX_QUERY_RETRY, - headers, - NodeStatus, - NODE_UPLOAD_FILE_URL, - NETWORK_TYPE, - NetworkType, - FabricNodeType, -) -from utils import get_k8s_client - - -def _upload_ca_crypto(pod): - copy_cmd = [ - "kubectl", - "cp", - "%s/%s:/etc/hyperledger/fabric-ca-server/crypto" - % (AGENT_ID, pod.metadata.name), - "crypto", - ] - subprocess.call(copy_cmd) - crypto_tar_file = "crypto.tgz" - tf = tarfile.open(crypto_tar_file, mode="w:gz") - tf.add("crypto") - tf.close() - - files = {"file": open(crypto_tar_file, "rb")} - del headers["Content-Type"] - r = requests.post(NODE_UPLOAD_FILE_URL, headers=headers, files=files) - - -def _generate_peer_env_from_ports(ports=None): - if ports is None: - ports = [] - - environments = [] - for port in ports: - internal_port = port.get("internal") - external_port = port.get("external") - if internal_port == 7051: - environments += [ - { - "name": "CORE_PEER_ADDRESS", - "value": "%s:%s" % (AGENT_IP, external_port), - }, - { - "name": "CORE_PEER_GOSSIP_EXTERNALENDPOINT", - "value": "%s:%s" % (AGENT_IP, external_port), - }, - ] - elif internal_port == 7052: - environments += [ - { - "name": "CORE_PEER_CHAINCODEADDRESS", - "value": 
"%s:%s" % (AGENT_IP, external_port), - }, - { - "name": "CORE_PEER_CHAINCODELISTENADDRESS", - "value": "0.0.0.0:%s" % external_port, - }, - ] - - return environments - - -def _create_fabric_node(): - k8s_client = get_k8s_client() - - network = FabricNetwork( - version=NETWORK_VERSION, - node_type=NODE_TYPE, - agent_id=AGENT_ID, - node_id=NODE_ID, - ) - - service = network.service() - - deploy_name = None - ports = [] - new_environments = [] - if service: - success, service_response = k8s_client.create_service( - AGENT_ID, **service - ) - if service.get("service_type") == "NodePort" and success: - ports = service_response.spec.ports - ports = [ - {"external": port.node_port, "internal": port.port} - for port in ports - ] - if NODE_TYPE == FabricNodeType.Peer.value: - new_environments = _generate_peer_env_from_ports(ports) - - # add new environments depend on service result - if len(new_environments) > 0: - network.add_environments(new_environments) - - deployment = network.deployment() - for key, value in deployment.items(): - print(key, value) - if deployment: - k8s_client.create_deployment(AGENT_ID, **deployment) - deploy_name = deployment.get("name") - # if service: - # success, service_response = k8s_client.create_service( - # AGENT_ID, **service - # ) - # if service.get("service_type") == "NodePort" and success: - # ports = service_response.spec.ports - # ports = [ - # {"external": port.node_port, "internal": port.port} - # for port in ports - # ] - # if ingress: - # k8s_client.create_ingress(AGENT_ID, **ingress) - # - # The pod of node deployed in kubernetes - pod = None - # Query pod status if is Running - node_status = NodeStatus.Error.value - for i in range(1, MAX_QUERY_RETRY): - pod = k8s_client.get_pod(AGENT_ID, deploy_name) - if pod and pod.status.phase == "Running": - node_status = NodeStatus.Running.value - break - sleep(5) - - # Update node status - ret = requests.put( - url=NODE_DETAIL_URL, - headers=headers, - data=json.dumps({"status": node_status, 
"ports": ports}), - ) - - if node_status == NodeStatus.Running.value: - # if deploy success and node type is ca, - # will upload the crypto files to api engine - if NODE_TYPE == "ca": - _upload_ca_crypto(pod) - - -def create_node(): - if NETWORK_TYPE == NetworkType.Fabric.value: - _create_fabric_node() diff --git a/src/agent/kubernetes-agent/src/operations/delete_node.py b/src/agent/kubernetes-agent/src/operations/delete_node.py deleted file mode 100644 index 4f4779bcb..000000000 --- a/src/agent/kubernetes-agent/src/operations/delete_node.py +++ /dev/null @@ -1,65 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import json -from time import sleep - -import requests - -from network import FabricNetwork -from utils.env import ( - AGENT_ID, - NODE_TYPE, - NETWORK_VERSION, - NODE_ID, - NETWORK_TYPE, - NetworkType, - NODE_DETAIL_URL, - headers, - MAX_QUERY_RETRY, -) -from utils import get_k8s_client - - -def _delete_fabric_node(): - k8s_client = get_k8s_client() - - network = FabricNetwork( - version=NETWORK_VERSION, - node_type=NODE_TYPE, - agent_id=AGENT_ID, - node_id=NODE_ID, - ) - deployment = network.deployment() - service = network.service() - # config = network.generate_config() - # - # deployment = config.get("deployment") - # service = config.get("service") - # ingress = config.get("ingress") - - deploy_name = None - if service: - k8s_client.delete_service(namespace=AGENT_ID, name=service.get("name")) - if deployment: - k8s_client.delete_deployment( - namespace=AGENT_ID, name=deployment.get("name") - ) - deploy_name = deployment.get("name") - - for i in range(1, MAX_QUERY_RETRY): - pod = k8s_client.get_pod(AGENT_ID, deploy_name) - if pod is None: - requests.put( - url=NODE_DETAIL_URL, - headers=headers, - data=json.dumps({"status": "deleted"}), - ) - requests.delete(url=NODE_DETAIL_URL, headers=headers) - break - sleep(5) - - -def delete_node(): - if NETWORK_TYPE == NetworkType.Fabric.value: - _delete_fabric_node() diff --git 
a/src/agent/kubernetes-agent/src/operations/fabric_ca_register.py b/src/agent/kubernetes-agent/src/operations/fabric_ca_register.py deleted file mode 100644 index dc10eee6e..000000000 --- a/src/agent/kubernetes-agent/src/operations/fabric_ca_register.py +++ /dev/null @@ -1,82 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from time import sleep -import os -from uuid import uuid4 -from utils.env import ( - NODE_FILE_URL, - FABRIC_CA_USER, - SERVICE_PORTS, - CA_ADMIN_NAME, - CA_ADMIN_PASSWORD, - AGENT_IP, - USER_PATCH_URL, - FabricImages, - TOKEN, - AGENT_ID, - NETWORK_VERSION, - MAX_QUERY_RETRY, -) -from utils import get_k8s_client - - -def fabric_ca_register(): - if NODE_FILE_URL: - ca_service_port = SERVICE_PORTS.get("7054") - pod_environments = [ - {"name": "NODE_FILE_URL", "value": NODE_FILE_URL}, - {"name": "CA_ADMIN_NAME", "value": CA_ADMIN_NAME}, - {"name": "CA_ADMIN_PASSWORD", "value": CA_ADMIN_PASSWORD}, - {"name": "CA_USER_NAME", "value": FABRIC_CA_USER.get("name")}, - { - "name": "CA_USER_PASSWORD", - "value": FABRIC_CA_USER.get("secret"), - }, - {"name": "CA_USER_TYPE", "value": FABRIC_CA_USER.get("type")}, - { - "name": "CA_USER_ATTRS", - "value": FABRIC_CA_USER.get("attrs", ""), - }, - {"name": "TOKEN", "value": TOKEN}, - {"name": "USER_PATCH_URL", "value": USER_PATCH_URL}, - { - "name": "FABRIC_CA_CLIENT_TLS_CERTFILES", - "value": "/tmp/crypto/ca-cert.pem", - }, - {"name": "FABRIC_CA_CLIENT_HOME", "value": "/tmp/admin"}, - { - "name": "CA_SERVER", - "value": "%s:%s" % (AGENT_IP, ca_service_port), - }, - ] - pod_command = ["bash", "-c"] - script_file_path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "fabric_ca_register.sh" - ) - with open(script_file_path, "r") as initial_ca_script: - shell_script = initial_ca_script.read() - pod_command_args = [shell_script] - job_name = "register-ca-%s" % uuid4().hex - template = { - "name": job_name, - "containers": [ - { - "name": "register", - "image": "%s:%s" - % (FabricImages.Ca.value, 
NETWORK_VERSION), - "command": pod_command, - "command_args": pod_command_args, - "environments": pod_environments, - } - ], - } - client = get_k8s_client() - client.create_job(AGENT_ID, **template) - - for i in range(1, MAX_QUERY_RETRY): - pod = client.get_pod(AGENT_ID, job_name) - if pod.status.phase == "Succeeded": - client.delete_job(AGENT_ID, job_name) - break - sleep(5) diff --git a/src/agent/kubernetes-agent/src/operations/fabric_ca_register.sh b/src/agent/kubernetes-agent/src/operations/fabric_ca_register.sh deleted file mode 100755 index 1d1cb1c34..000000000 --- a/src/agent/kubernetes-agent/src/operations/fabric_ca_register.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env bash -# shellcheck disable=SC2164 -cd /tmp; -wget "${NODE_FILE_URL}"; -# shellcheck disable=SC2034 -NODE_FILE="${NODE_FILE_URL##*/}" -tar -zxf "${NODE_FILE}" -fabric-ca-client enroll -d -u https://"${CA_ADMIN_NAME}":"${CA_ADMIN_PASSWORD}"@"${CA_SERVER}" -if [ "${CA_USER_ATTRS}" -eq "" ]; then - fabric-ca-client register -d --id.name "${CA_USER_NAME}" --id.secret "${CA_USER_PASSWORD}" --id.type "${CA_USER_TYPE}" -else - fabric-ca-client register -d --id.name "${CA_USER_NAME}" --id.secret "${CA_USER_PASSWORD}" --id.type "${CA_USER_TYPE}" --id.attrs "${CA_USER_ATTRS}" -fi -# shellcheck disable=SC2034 -# shellcheck disable=SC2181 -if [ $? 
-eq 0 ]; then - # shellcheck disable=SC2034 - user_status="registered" -else - # shellcheck disable=SC2034 - user_status="fail" -fi -wget --method=PATCH --body-data "{\"status\": \"${user_status}\"}" --header "Authorization: JWT ${TOKEN}" --header "Content-Type: application/json" "${USER_PATCH_URL}" diff --git a/src/agent/kubernetes-agent/src/utils/__init__.py b/src/agent/kubernetes-agent/src/utils/__init__.py deleted file mode 100644 index 1f11a21ce..000000000 --- a/src/agent/kubernetes-agent/src/utils/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import os -import zipfile -from .env import AGENT_CONFIG_FILE, AGENT_ID -from .download import download_file -from .client import KubernetesClient - - -def prepare_config(): - config_path = "/app/.kube/config" - - if os.path.exists(config_path): - return config_path - - config_file = download_file(AGENT_CONFIG_FILE, "/tmp") - ext = os.path.splitext(config_file)[-1].lower() - - if ext == ".zip": - with zipfile.ZipFile(config_file, "r") as zip_ref: - zip_ref.extractall("/app") - - return config_path - - -def get_k8s_client(): - k8s_config = prepare_config() - - k8s_client = KubernetesClient(config_file=k8s_config) - k8s_client.get_or_create_namespace(name=AGENT_ID) - - return k8s_client diff --git a/src/agent/kubernetes-agent/src/utils/client.py b/src/agent/kubernetes-agent/src/utils/client.py deleted file mode 100644 index 734534676..000000000 --- a/src/agent/kubernetes-agent/src/utils/client.py +++ /dev/null @@ -1,373 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import logging - -from kubernetes import client, config -from kubernetes.client.rest import ApiException - -LOG = logging.getLogger(__name__) - - -class KubernetesClient(object): - def __init__(self, config_file=None): - super(KubernetesClient, self).__init__() - self._config_file = config_file - config.load_kube_config(config_file) - - self._major, self._minor = self._get_version_code() - - def list_pods(self): 
- v1 = client.CoreV1Api() - print("Listing pods with their IPs:") - ret = v1.list_pod_for_all_namespaces(watch=False) - for i in ret.items: - print( - "%s\t%s\t%s" - % (i.status.pod_ip, i.metadata.namespace, i.metadata.name) - ) - - def _get_version_code(self): - version_api = client.VersionApi() - major = 1 - minor = 16 - - try: - api_response = version_api.get_code() - major = int(api_response.major) - minor = int(api_response.minor) - except ApiException as e: - LOG.error("Exception when calling VersionApi->get_code: %s", e) - - return major, minor - - def get_pod(self, namespace=None, deploy_name=None): - v1 = client.CoreV1Api() - pod = None - try: - api_response = v1.list_namespaced_pod( - namespace, label_selector="app=%s" % deploy_name - ) - except ApiException as e: - LOG.error( - "Exception when calling CoreV1Api->list_namespaced_pod: %s", e - ) - else: - for item in api_response.items: - pod_name = item.metadata.name - pod = item - if pod_name.startswith(deploy_name): - break - - return pod - - def get_or_create_namespace(self, name=None): - if name: - v1 = client.CoreV1Api() - try: - v1.read_namespace(name=name) - except ApiException: - body = client.V1Namespace( - kind="Namespace", - api_version="v1", - metadata=client.V1ObjectMeta(name=name), - ) - try: - v1.create_namespace(body=body) - except ApiException as e: - LOG.error( - "Exception when calling CoreV1Api->read_namespace: %s", - e, - ) - - def _generate_container_pods(self, containers=None): - if containers is None or len(containers) == 0: - return None - - container_pods = [] - for container in containers: - ports = container.get("ports", []) - environments = container.get("environments", []) - command = container.get("command", []) - command_args = container.get("command_args", []) - volume_mounts = container.get("volume_mounts") - volume_mounts = ( - [ - client.V1VolumeMount( - mount_path=volume_mount.get("path"), - name=volume_mount.get("name"), - ) - for volume_mount in volume_mounts - ] - 
if volume_mounts - else None - ) - - environments = [ - client.V1EnvVar(name=env.get("name"), value=env.get("value")) - for env in environments - ] - ports = [ - client.V1ContainerPort(container_port=port) for port in ports - ] - container_parameter = { - "name": container.get("name"), - "image": container.get("image"), - "image_pull_policy": "IfNotPresent", - } - if environments is not None and len(environments) > 0: - container_parameter.update({"env": environments}) - if command is not None and len(command) > 0: - container_parameter.update({"command": command}) - if command_args is not None and len(command_args) > 0: - container_parameter.update({"args": command_args}) - if ports is not None and len(ports) > 0: - container_parameter.update({"ports": ports}) - if volume_mounts is not None and len(volume_mounts) > 0: - container_parameter.update({"volume_mounts": volume_mounts}) - container_pods.append(client.V1Container(**container_parameter)) - - return container_pods - - def _generate_pod_template(self, *args, **kwargs): - containers = kwargs.get("containers", []) - initial_containers = kwargs.get("initial_containers", []) - volumes_json = kwargs.get("volumes", []) - deploy_name = kwargs.get("name") - labels = kwargs.get("labels", {}) - labels.update({"app": deploy_name}) - restart_policy = kwargs.get("restart_policy", "Always") - volumes = [] - for volume in volumes_json: - volume_name = volume.get("name") - host_path = volume.get("host_path", None) - empty_dir = volume.get("empty_dir", None) - parameters = {} - if host_path: - host_path = client.V1HostPathVolumeSource(path=host_path) - parameters.update({"host_path": host_path}) - if empty_dir: - empty_dir = client.V1EmptyDirVolumeSource(**empty_dir) - parameters.update({"empty_dir": empty_dir}) - persistent_volume_claim = volume.get("pvc", None) - if persistent_volume_claim: - persistent_volume_claim = ( - client.V1PersistentVolumeClaimVolumeSource( - claim_name=persistent_volume_claim - ) - ) - 
parameters.update( - {"persistent_volume_claim": persistent_volume_claim} - ) - volumes.append(client.V1Volume(name=volume_name, **parameters)) - initial_container_pods = self._generate_container_pods( - initial_containers - ) - container_pods = self._generate_container_pods(containers) - pod_spec = client.V1PodSpec( - init_containers=initial_container_pods, - containers=container_pods, - volumes=volumes, - restart_policy=restart_policy, - ) - spec_metadata = client.V1ObjectMeta(labels=labels) - template_spec = client.V1PodTemplateSpec( - metadata=spec_metadata, spec=pod_spec - ) - - LOG.info("template spec %s", template_spec) - - return template_spec - - def create_deployment(self, namespace=None, *args, **kwargs): - deploy_name = kwargs.get("name") - deployment_metadata = client.V1ObjectMeta(name=deploy_name) - template_spec = self._generate_pod_template(*args, **kwargs) - body = client.V1Deployment( - api_version="apps/v1", - kind="Deployment", - metadata=deployment_metadata, - spec=client.V1DeploymentSpec( - selector=client.V1LabelSelector( - match_labels={ - "app": kwargs.get("name"), - } - ), - template=template_spec, - ), - ) - - api_instance = client.AppsV1Api() - - try: - api_instance.create_namespaced_deployment( - namespace=namespace, body=body, pretty="true" - ) - except ApiException as e: - LOG.error("Exception when call AppsV1beta1Api: %s", e) - raise e - - return True - - def create_job(self, namespace=None, *args, **kwargs): - job_name = kwargs.get("name") - job_metadata = client.V1ObjectMeta(name=job_name) - template_spec = self._generate_pod_template( - *args, **kwargs, restart_policy="Never" - ) - body = client.V1Job( - api_version="batch/v1", - kind="Job", - metadata=job_metadata, - spec=client.V1JobSpec(template=template_spec), - ) - api_instance = client.BatchV1Api() - - try: - api_instance.create_namespaced_job( - namespace=namespace, body=body, pretty="true" - ) - except ApiException as e: - LOG.error("Exception when call AppsV1beta1Api: 
%s", e) - raise e - - return True - - def create_service( - self, - namespace=None, - name=None, - selector=None, - ports=None, - service_type="ClusterIP", - ): - if selector is None: - selector = {} - if ports is None: - ports = [] - - metadata = client.V1ObjectMeta(name=name, labels={"app": name}) - ports = [ - client.V1ServicePort(port=port.get("port"), name=port.get("name")) - for port in ports - ] - spec = client.V1ServiceSpec( - ports=ports, selector=selector, type=service_type - ) - body = client.V1Service( - metadata=metadata, spec=spec, kind="Service", api_version="v1" - ) - - api_instance = client.CoreV1Api() - try: - response = api_instance.create_namespaced_service(namespace, body) - except ApiException as e: - LOG.error("Exception when call CoreV1Api: %s", e) - raise e - - return True, response - - def create_ingress( - self, - namespace=None, - name=None, - service_name=None, - ingress_paths=None, - annotations=None, - ): - if ingress_paths is None: - ingress_paths = [] - if annotations is None: - annotations = {} - - api_instance = client.ExtensionsV1beta1Api() - metadata = client.V1ObjectMeta(name=name, annotations=annotations) - path_list = [] - for ing_path in ingress_paths: - ing_backend = client.V1beta1IngressBackend( - service_name=service_name, service_port=ing_path.get("port", 0) - ) - path_list.append( - client.V1beta1HTTPIngressPath( - path=ing_path.get("path", ""), backend=ing_backend - ) - ) - http_dict = client.V1beta1HTTPIngressRuleValue(paths=path_list) - rule_list = [client.V1beta1IngressRule(http=http_dict, host="")] - ingress_spec = client.V1beta1IngressSpec(rules=rule_list) - body = client.V1beta1Ingress( - api_version="extensions/v1beta1", - metadata=metadata, - spec=ingress_spec, - kind="Ingress", - ) - - try: - api_instance.create_namespaced_ingress( - namespace=namespace, body=body, pretty="true" - ) - except ApiException as e: - LOG.error("Create ingress failed %s", e) - raise e - - return True - - def delete_job(self, 
namespace=None, name=None): - api_instance = client.BatchV1Api() - delete_options = client.V1DeleteOptions( - propagation_policy="Foreground" - ) - grace_period_seconds = 10 - - try: - api_instance.delete_namespaced_job( - name=name, - namespace=namespace, - body=delete_options, - grace_period_seconds=grace_period_seconds, - pretty="true", - ) - except ApiException as e: - LOG.error("Exception when call AppsV1beta1Api: %s", e) - - def delete_deployment(self, namespace=None, name=None): - api_instance = client.AppsV1Api() - delete_options = client.V1DeleteOptions( - propagation_policy="Foreground" - ) - grace_period_seconds = 10 - - try: - api_instance.delete_namespaced_deployment( - name=name, - namespace=namespace, - body=delete_options, - grace_period_seconds=grace_period_seconds, - pretty="true", - ) - except ApiException as e: - LOG.error("Exception when call AppsV1beta1Api: %s", e) - - def delete_service(self, namespace=None, name=None): - api_instance = client.CoreV1Api() - - try: - api_instance.delete_namespaced_service( - name=name, namespace=namespace - ) - except ApiException as e: - LOG.error("Exception when call CoreV1Api: %s", e) - - def delete_ingress(self, namespace=None, name=None): - api_instance = client.ExtensionsV1beta1Api() - delete_options = client.V1DeleteOptions() - - try: - api_instance.delete_namespaced_ingress( - name=name, - namespace=namespace, - body=delete_options, - pretty="true", - ) - except ApiException as e: - LOG.error("Exception when call AppsV1beta1Api: %s\n" % e) diff --git a/src/agent/kubernetes-agent/src/utils/download.py b/src/agent/kubernetes-agent/src/utils/download.py deleted file mode 100644 index 2d5a2ebbe..000000000 --- a/src/agent/kubernetes-agent/src/utils/download.py +++ /dev/null @@ -1,22 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import requests -import mimetypes -import os -from uuid import uuid4 - - -def download_file(url, target_dir): - r = requests.get(url, allow_redirects=True) - content_type = 
r.headers["content-type"] - extension = mimetypes.guess_extension(content_type) - file_name = "%s%s" % (uuid4().hex, extension) - target_file = os.path.join(target_dir, file_name) - - if not os.path.exists(target_dir): - os.makedirs(target_dir) - - open(target_file, "wb").write(r.content) - - return target_file diff --git a/src/agent/kubernetes-agent/src/utils/env.py b/src/agent/kubernetes-agent/src/utils/env.py deleted file mode 100644 index 5a0d7f2ed..000000000 --- a/src/agent/kubernetes-agent/src/utils/env.py +++ /dev/null @@ -1,83 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import os -import json -from enum import Enum, unique - -# deploy name in kubernetes -DEPLOY_NAME = os.getenv("DEPLOY_NAME") -# network type to deploy, support fabric -NETWORK_TYPE = os.getenv("NETWORK_TYPE") -# network version, for fabric support 1.4 -NETWORK_VERSION = os.getenv("NETWORK_VERSION") -# node type -# fabric: ca, peer, orderer -NODE_TYPE = os.getenv("NODE_TYPE") -# configuration file for kubernetes agent -AGENT_CONFIG_FILE = os.getenv("AGENT_CONFIG_FILE") -AGENT_ID = os.getenv("AGENT_ID") -AGENT_IP = os.getenv("AGENT_IP") -NODE_ID = os.getenv("NODE_ID") -OPERATION = os.getenv("OPERATION") -TOKEN = os.getenv("TOKEN") -NODE_DETAIL_URL = os.getenv("NODE_DETAIL_URL") -NODE_FILE_URL = os.getenv("NODE_FILE_URL") -NODE_SERVICE_PORT = os.getenv("NODE_SERVICE_PORT") -NODE_UPLOAD_FILE_URL = os.getenv("NODE_UPLOAD_FILE_URL") -MAX_QUERY_RETRY = 30 -FABRIC_CA_USER = json.loads(os.getenv("FABRIC_CA_USER", "{}")) -SERVICE_PORTS = json.loads(os.getenv("SERVICE_PORTS", "{}")) -USER_PATCH_URL = os.getenv("USER_PATCH_URL") - -CA_CONFIG = json.loads(os.getenv("FABRIC_CA_CONFIG", "{}")) -PEER_CONFIG = json.loads(os.getenv("FABRIC_PEER_CONFIG", "{}")) -# Initial admin name/password for ca server -CA_ADMIN_NAME = CA_CONFIG.get("admin_name", "admin") -CA_ADMIN_PASSWORD = CA_CONFIG.get("admin_password", "adminpw") - -FABRIC_IMAGE_BASE_NAME = "hyperledger/fabric" - -headers = { - 
"Authorization": "JWT %s" % TOKEN, - "Content-Type": "application/json", -} - - -@unique -class AgentOperation(Enum): - Create = "create" - Start = "start" - Stop = "stop" - Query = "query" - Update = "update" - Delete = "delete" - FabricCARegister = "fabric:ca:register" - - -@unique -class NodeStatus(Enum): - Deploying = "deploying" - Running = "running" - Stopped = "stopped" - Deleting = "deleting" - Error = "error" - - -@unique -class NetworkType(Enum): - Fabric = "fabric" - - -@unique -class FabricNodeType(Enum): - Ca = "ca" - Orderer = "orderer" - Peer = "peer" - - -@unique -class FabricImages(Enum): - Ca = "%s-ca" % FABRIC_IMAGE_BASE_NAME - Peer = "%s-peer" % FABRIC_IMAGE_BASE_NAME - Orderer = "%s-orderer" % FABRIC_IMAGE_BASE_NAME diff --git a/src/api-engine/.dockerignore b/src/api-engine/.dockerignore new file mode 100644 index 000000000..67c0ff5af --- /dev/null +++ b/src/api-engine/.dockerignore @@ -0,0 +1,2 @@ +cello +!cello/node diff --git a/src/api-engine/Dockerfile b/src/api-engine/Dockerfile new file mode 100644 index 000000000..69efd5608 --- /dev/null +++ b/src/api-engine/Dockerfile @@ -0,0 +1,27 @@ +FROM python:3.8 + +# Install software +RUN apt-get update\ + && apt-get install -y gettext-base graphviz libgraphviz-dev vim\ + && apt-get autoclean\ + && apt-get clean\ + && apt-get autoremove\ + && rm -rf /var/cache/apt/ + +# Copy source code to the working dir +COPY . . + +# Install compiled code tools from Artifactory and copy it to opt folder. 
+RUN curl -L --retry 5 --retry-delay 3 "https://github.com/hyperledger/fabric/releases/download/v2.5.13/hyperledger-fabric-linux-amd64-2.5.13.tar.gz" | tar xz -C ./cello/ + +# Install python dependencies +RUN pip3 install -r requirements.txt + +# Install go +ENV GO_VERSION=1.24.0 +RUN wget https://go.dev/dl/go${GO_VERSION}.linux-amd64.tar.gz \ + && tar -C /usr/local -xzf go${GO_VERSION}.linux-amd64.tar.gz \ + && rm go${GO_VERSION}.linux-amd64.tar.gz +ENV PATH="${PATH}:/usr/local/go/bin" + +CMD ["bash", "entrypoint.sh"] diff --git a/src/api-engine/api/auth.py b/src/api-engine/api/auth.py deleted file mode 100644 index 86fa665a5..000000000 --- a/src/api-engine/api/auth.py +++ /dev/null @@ -1,53 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import logging -import os - -from django.conf import settings -from django.core.exceptions import ObjectDoesNotExist -from rest_framework import authentication -from rest_framework.permissions import BasePermission -from api.common.enums import UserRole -from api.models import UserProfile - -LOG = logging.getLogger(__name__) -TOKEN_INFO_URL = getattr(settings, "TOKEN_INFO_URL", "") -SUPER_USER_TOKEN = os.environ.get("ADMIN_TOKEN", "") -ADMIN_NAME = os.getenv("ADMIN_USERNAME") - - -class IsAdminAuthenticated(BasePermission): - """ - Allows access only to authenticated users. - """ - - def has_permission(self, request, view): - return ( - request.user and request.user.role == UserRole.Admin.name.lower() - ) - - -class IsOperatorAuthenticated(BasePermission): - """ - Allows access only to operators. - """ - - def has_permission(self, request, view): - return ( - request.user - and request.user.role == UserRole.Operator.name.lower() - ) - - -class IsSuperUserAuthenticated(BasePermission): - """ - Allows access only to authenticated users. 
- """ - - def has_permission(self, request, view): - return ( - request.user - and request.user.is_authenticated - and request.user.is_super_user - ) diff --git a/src/api-engine/api/common/enums.py b/src/api-engine/api/common/enums.py index 085c279c7..d500c7bde 100644 --- a/src/api-engine/api/common/enums.py +++ b/src/api-engine/api/common/enums.py @@ -69,146 +69,6 @@ def names(cls): return [name.lower() for name, _ in cls.__members__.items()] -@unique -class HostStatus(ExtraEnum): - Inactive = 0 - Active = 1 - - -@unique -class NetworkStatus(ExtraEnum): - Stopped = 0 - Running = 1 - Error = 2 - - -@unique -class LogLevel(ExtraEnum): - Info = 0 - Warning = 1 - Debug = 2 - Error = 3 - Critical = 4 - - -@unique -class Operation(ExtraEnum): - Start = 0 - Stop = 1 - Restart = 2 - - -@unique -class NetworkOperation(ExtraEnum): - Join = 0 - Leave = 1 - - -@unique -class HostType(ExtraEnum): - Docker = 0 - Kubernetes = 1 - - -@unique -class ChannelType(ExtraEnum): - System = 0 - Normal = 1 - - -@unique -class NetworkType(ExtraEnum): - Fabric = "fabric" - - -@unique -class FabricCAServerType(ExtraEnum): - # every company only can create one TLS type ca server - TLS = "tls" - Signature = "signature" - - -@unique -class FabricVersions(ExtraEnum): - V1_4 = "1.4.2" - V1_5 = "1.5" - - -@unique -class FabricNodeType(ExtraEnum): - Ca = 0 - Orderer = 1 - Peer = 2 - - -@unique -class NodeStatus(ExtraEnum): - Created = 0 - Restarting = 1 - Running = 2 - Removing = 3 - Paused = 4 - Exited = 5 - Dead = 6 - - -@unique -class FabricCAUserType(ExtraEnum): - Peer = "peer" - Orderer = "orderer" - User = "user" - - -@unique -class FabricCAUserStatus(ExtraEnum): - Registering = "registering" - Registered = "registered" - Fail = "fail" - - -@unique -class NetworkCreateType(ExtraEnum): - New = 0 - Import = 1 - - -@unique -class K8SCredentialType(ExtraEnum): - CertKey = 0 - Config = 1 - UsernamePassword = 2 - - -@unique -class ConsensusPlugin(ExtraEnum): - Solo = 0 - Kafka = 1 - - -@unique 
-class UserRole(ExtraEnum): - Admin = 0 - Operator = 1 - User = 2 - - -@unique -class FileType(ExtraEnum): - Certificate = 0 - - -@unique -class AgentOperation(ExtraEnum): - Create = "create" - Start = "start" - Stop = "stop" - Query = "query" - Update = "update" - Delete = "delete" - FabricCARegister = "fabric:ca:register" - NewNetwork = "new:network" - - class EnumWithDisplayMeta(EnumMeta): def __new__(mcs, name, bases, attrs): display_strings = attrs.get("DisplayStrings") diff --git a/src/api-engine/api/common/response.py b/src/api-engine/api/common/response.py index 0fa609dd1..be4e1e1ca 100644 --- a/src/api-engine/api/common/response.py +++ b/src/api-engine/api/common/response.py @@ -9,11 +9,40 @@ } """ +import enum +from typing import Type, Dict +from rest_framework import serializers -def ok(data): - return {"data": data, "msg": None, "status": "successful"} +class Status(enum.Enum): + SUCCESSFUL = "SUCCESSFUL" + FAILED = "FAILED" -def err(msg): - return {"data": None, "msg": msg, "status": "fail"} + +def make_response_serializer(data_serializer: Type[serializers.Serializer]): + class _ResponseBody(serializers.Serializer): + status = serializers.ChoiceField( + choices=[(s.value, s.name) for s in Status] + ) + msg = serializers.CharField(required=False, allow_null=True, allow_blank=True) + data = data_serializer(required=False, allow_null=True) + + _ResponseBody.__name__ = f"ResponseBody[{data_serializer.__name__}]" + return _ResponseBody + + +def ok(data: Dict[str, any]) -> Dict[str, any]: + return { + "status": Status.SUCCESSFUL.value, + "msg": None, + "data": data + } + + +def err(msg: str) -> Dict[str, any]: + return { + "status": Status.FAILED.value, + "msg": msg, + "data": None + } diff --git a/src/api-engine/api/config.py b/src/api-engine/api/config.py deleted file mode 100644 index d63a6c5b1..000000000 --- a/src/api-engine/api/config.py +++ /dev/null @@ -1,16 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -CELLO_HOME = "/opt/cello" -FABRIC_TOOL = 
"/opt/bin" -FABRIC_CFG = "/opt/node" -FABRIC_NODE = "/opt/hyperledger/fabric" -PRODUCTION_NODE = "/opt/hyperledger/production" - -FABRIC_PEER_CFG = "/opt/node/peer.yaml.bak" -FABRIC_ORDERER_CFG = "/opt/node/orderer.yaml.bak" -FABRIC_CA_CFG = "/opt/node/ca.yaml.bak" - -FABRIC_CHAINCODE_STORE = "/opt/cello/chaincode" - -FABRIC_VERSION = "2.5.13" diff --git a/src/api-engine/api/lib/agent/__init__.py b/src/api-engine/api/lib/agent/__init__.py deleted file mode 100644 index 0997bf3fe..000000000 --- a/src/api-engine/api/lib/agent/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from api.lib.agent.handler import AgentHandler diff --git a/src/api-engine/api/lib/agent/base.py b/src/api-engine/api/lib/agent/base.py deleted file mode 100644 index 9ac0ccf84..000000000 --- a/src/api-engine/api/lib/agent/base.py +++ /dev/null @@ -1,33 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import abc - - -class AgentBase(object): - __metaclass__ = abc.ABCMeta - - @abc.abstractmethod - def create(self, *args, **kwargs): - """ - Create a new cluster - Args: - *args: args - **kwargs: keyword args - - Returns: - - """ - pass - - @abc.abstractmethod - def delete(self, *args, **kwargs): - pass - - @abc.abstractmethod - def start(self, *args, **kwargs): - pass - - @abc.abstractmethod - def stop(self, *args, **kwargs): - pass diff --git a/src/api-engine/api/lib/agent/docker/__init__.py b/src/api-engine/api/lib/agent/docker/__init__.py deleted file mode 100644 index 0ad59ed4c..000000000 --- a/src/api-engine/api/lib/agent/docker/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from api.lib.agent.docker.handler import DockerAgent diff --git a/src/api-engine/api/lib/agent/docker/fabric/__init__.py b/src/api-engine/api/lib/agent/docker/fabric/__init__.py deleted file mode 100644 index 71ebe8d3c..000000000 --- a/src/api-engine/api/lib/agent/docker/fabric/__init__.py +++ /dev/null @@ -1,64 +0,0 @@ -# -# 
SPDX-License-Identifier: Apache-2.0 -# -import logging - -from api.lib.agent.network_base import NetworkBase -from api.common.enums import FabricNodeType -from api.utils.port_picker import find_available_ports, set_ports_mapping - -LOG = logging.getLogger(__name__) - -CA_IMAGE_NAME = "hyperledger/fabric-ca" - - -class FabricNetwork(NetworkBase): - def __init__(self, *args, **kwargs): - super(FabricNetwork, self).__init__(*args, **kwargs) - - self._version = kwargs.get("version") - self._type = kwargs.get("node_type") - self._agent_id = kwargs.get("agent_id") - self._node_id = kwargs.get("node_id") - self._docker_host = kwargs.get("docker_host") - self._docker_host_ip = self._docker_host.split(":")[1].split("//")[-1] - self._compose_file_version = kwargs.get("compose_file_version", "3.2") - self._template = {"version": self._compose_file_version} - - def _generate_ca_compose_yaml(self): - environment = ["FABRIC_CA_HOME=/etc/hyperledger/fabric-ca-server"] - internal_ports = [7054] - ports = find_available_ports( - self._docker_host_ip, self._node_id, self._agent_id - ) - if not len(ports): - return None - ports_mapping = [ - {"external": ports[i], "internal": internal_ports[i]} - for i in range(len(ports)) - ] - set_ports_mapping(self._node_id, ports_mapping) - self._template.update( - { - "services": { - self._type: { - "image": "%s:%s" % (CA_IMAGE_NAME, self._version), - "environment": environment, - "ports": ["%s:7054" % ports[0]], - "command": "sh -c 'fabric-ca-server start " - "--ca.certfile " - "/etc/hyperledger/fabric-ca-server-config" - "/ca.org1.example.com-cert.pem " - "--ca.keyfile " - "/etc/hyperledger/fabric-ca-server-config/" - "CA1_PRIVATE_KEY -b admin:adminpw -d'", - } - } - } - ) - - return self._template - - def generate_config(self, *args, **kwargs): - if self._type == FabricNodeType.Ca.name.lower(): - return self._generate_ca_compose_yaml() diff --git a/src/api-engine/api/lib/agent/docker/handler.py b/src/api-engine/api/lib/agent/docker/handler.py 
deleted file mode 100644 index 58f6342e8..000000000 --- a/src/api-engine/api/lib/agent/docker/handler.py +++ /dev/null @@ -1,157 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import logging -from requests import get, post -import json - -from api.lib.agent.base import AgentBase - -LOG = logging.getLogger(__name__) - - -class DockerAgent(AgentBase): - """Class represents docker agent.""" - - def __init__(self, node=None): - """init DockerAgent - param: - node:Information needed to create, start, and delete nodes, such as organizations, nodes, and so on - return:null - """ - if node is None: - node = {} - self._id = node.get("id") - self._name = node.get("name") - self._urls = node.get("urls") - self._cname = node.get("container_name") - - def create(self, info): - """ - Create node - :param node: Information needed to create nodes - :return: container ID - :rtype: string - """ - try: - port_map = { - str(port.internal): str(port.external) - for port in info.get("ports") - } - - data = { - "msp": info.get("msp")[2:-1], - "tls": info.get("tls")[2:-1], - "peer_config_file": info.get("config_file")[2:-1], - "orderer_config_file": info.get("config_file")[2:-1], - "img": "hyperledger/fabric:2.5.13", - "cmd": ( - 'bash /tmp/init.sh "peer node start"' - if info.get("type") == "peer" - else 'bash /tmp/init.sh "orderer"' - ), - "name": info.get("name"), - "type": info.get("type"), - "port_map": port_map.__repr__(), - "action": "create", - } - - response = post("{}/api/v1/nodes".format(self._urls), data=data) - - if response.status_code == 200: - txt = json.loads(response.text) - return txt["data"]["id"] - else: - return None - except Exception as e: - LOG.exception("DockerAgent Not Created") - raise e - - def delete(self, *args, **kwargs): - try: - response = post( - "{}/api/v1/nodes/{}".format(self._urls, self._cname), - data={"action": "delete"}, - ) - if response.status_code == 200: - return True - else: - raise response.reason - except Exception as e: - 
LOG.exception("DockerAgent Not Deleted") - raise e - - def start(self, *args, **kwargs): - try: - response = post( - "{}/api/v1/nodes/{}".format(self._urls, self._cname), - data={"action": "start"}, - ) - if response.status_code == 200: - return True - else: - raise response.reason - except Exception as e: - LOG.exception("DockerAgent Not Started") - raise e - - def restart(self, *args, **kwargs): - try: - response = post( - "{}/api/v1/nodes/{}".format(self._urls, self._cname), - data={"action": "restart"}, - ) - if response.status_code == 200: - return True - else: - raise response.reason - except Exception as e: - LOG.exception("DockerAgent Not Restarted") - raise e - - def stop(self, *args, **kwargs): - try: - response = post( - "{}/api/v1/nodes/{}".format(self._urls, self._cname), - data={"action": "stop"}, - ) - if response.status_code == 200: - return True - else: - raise response.reason - except Exception as e: - LOG.exception("DockerAgent Not Stopped") - raise e - - def get(self, *args, **kwargs): - try: - response = get( - "{}/api/v1/nodes/{}".format(self._urls, self._cname) - ) - if response.status_code == 200: - return True - else: - raise response.reason - except Exception as e: - LOG.exception("DockerAgent Not Found") - raise e - - def update_config(self, config_file, node_type): - try: - cmd = 'bash /tmp/update.sh "{} node start"'.format(node_type) - data = { - "peer_config_file": config_file, - "orderer_config_file": config_file, - "action": "update", - "cmd": cmd, - } - response = post( - "{}/api/v1/nodes/{}".format(self._urls, self._cname), data=data - ) - if response.status_code == 200: - return True - else: - raise response.reason - except Exception as e: - LOG.exception("Config Update Failed") - raise e diff --git a/src/api-engine/api/lib/agent/handler.py b/src/api-engine/api/lib/agent/handler.py deleted file mode 100644 index faa8eb962..000000000 --- a/src/api-engine/api/lib/agent/handler.py +++ /dev/null @@ -1,76 +0,0 @@ -# -# 
SPDX-License-Identifier: Apache-2.0 -# -import logging - -from django.conf import settings - -from api.lib.agent.docker import DockerAgent -from api.lib.agent.kubernetes import KubernetesAgent -from api.common.enums import HostType - -LOG = logging.getLogger(__name__) -MEDIA_ROOT = getattr(settings, "MEDIA_ROOT") - - -class AgentHandler(object): - def __init__(self, node=None): - self._network_type = node.get("network_type") - self._network_version = node.get("network_version") - self._node_type = node.get("type") - self._agent_type = node.get("agent_type") - self._node = node - if self._agent_type == HostType.Docker.name.lower(): - self._agent = DockerAgent(node) - elif self._agent_type == HostType.Kubernetes.name.lower(): - self._agent = KubernetesAgent(node) - - @property - def node(self): - return self._node - - @node.setter - def node(self, value): - self._node = value - - @property - def config(self): - return self._agent.generate_config() - - def create(self, info): - try: - cid = self._agent.create(info) - if cid: - return cid - else: - return None - except Exception as e: - LOG.error("Agent Not Created", exc_info=True) - raise e - - def delete(self): - self._agent.delete() - - return True - - def start(self): - self._agent.start() - - return True - - def stop(self): - self._agent.stop() - - return True - - def update_config(self, config_file, node_type): - self._agent.update_config(config_file, node_type) - - return True - - def get(self): - try: - return self._agent.get() - except Exception as e: - LOG.error("Agent Not Found: %s", e, exc_info=True) - return False diff --git a/src/api-engine/api/lib/agent/kubernetes/__init__.py b/src/api-engine/api/lib/agent/kubernetes/__init__.py deleted file mode 100644 index 96fa6dc55..000000000 --- a/src/api-engine/api/lib/agent/kubernetes/__init__.py +++ /dev/null @@ -1,83 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import logging - -from api.lib.agent.base import AgentBase -from 
api.lib.agent.kubernetes.common import KubernetesClient -from api.lib.agent.kubernetes.fabric import FabricNetwork -from api.utils.port_picker import set_ports_mapping - - -LOG = logging.getLogger(__name__) - - -class KubernetesAgent(AgentBase): - def __init__(self, node=None): - if node is None: - node = {} - config_file = node.get("k8s_config_file") - self._project_name = node.get("name") - self._network_type = node.get("network_type") - self._network_version = node.get("network_version") - self._node_type = node.get("type") - self._node_id = node.get("id") - self._agent_id = node.get("agent_id") - - self._client = KubernetesClient(config_file=config_file) - self._network = FabricNetwork( - version=self._network_version, - node_type=self._node_type, - agent_id=self._agent_id, - node_id=self._node_id, - ) - self._client.get_or_create_namespace(name=self._agent_id) - self._config = self._network.generate_config() - - def create(self, *args, **kwargs): - deployment = self._config.get("deployment") - service = self._config.get("service") - ingress = self._config.get("ingress") - - if deployment: - self._client.create_deployment(self._agent_id, **deployment) - if service: - success, service_response = self._client.create_service( - self._agent_id, **service - ) - if service.get("service_type") == "NodePort" and success: - ports = service_response.spec.ports - ports = [ - {"external": port.node_port, "internal": port.port} - for port in ports - ] - set_ports_mapping(self._node_id, ports, True) - if ingress: - self._client.create_ingress(self._agent_id, **ingress) - - def start(self, *args, **kwargs): - pass - - def stop(self, *args, **kwargs): - pass - - def delete(self, *args, **kwargs): - deployment = self._config.get("deployment") - service = self._config.get("service") - ingress = self._config.get("ingress") - - if ingress: - self._client.delete_ingress( - namespace=self._agent_id, name=ingress.get("name") - ) - if service: - self._client.delete_service( - 
namespace=self._agent_id, name=service.get("name") - ) - if deployment: - self._client.delete_deployment( - namespace=self._agent_id, name=deployment.get("name") - ) - - def generate_config(self, *args, **kwargs): - return self._config diff --git a/src/api-engine/api/lib/agent/kubernetes/common.py b/src/api-engine/api/lib/agent/kubernetes/common.py deleted file mode 100644 index 543aac66b..000000000 --- a/src/api-engine/api/lib/agent/kubernetes/common.py +++ /dev/null @@ -1,221 +0,0 @@ -import logging - -from django.conf import settings -from kubernetes import client, config -from kubernetes.client.rest import ApiException - -LOG = logging.getLogger(__name__) - -K8S_NAMESPACE = getattr(settings, "K8S_NAMESPACE", "cello") - - -class KubernetesClient(object): - def __init__(self, config_file=None): - super(KubernetesClient, self).__init__() - self._config_file = config_file - config.load_kube_config(config_file) - - def list_pods(self): - v1 = client.CoreV1Api() - print("Listing pods with their IPs:") - ret = v1.list_pod_for_all_namespaces(watch=False) - for i in ret.items: - print( - "%s\t%s\t%s" - % (i.status.pod_ip, i.metadata.namespace, i.metadata.name) - ) - - def get_or_create_namespace(self, name=None): - if name: - v1 = client.CoreV1Api() - try: - v1.read_namespace(name=name) - except ApiException: - body = client.V1Namespace( - kind="Namespace", - api_version="v1", - metadata=client.V1ObjectMeta(name=name), - ) - try: - v1.create_namespace(body=body) - except ApiException as e: - LOG.error( - "Exception when calling CoreV1Api->read_namespace: %s", - e, - ) - - def create_deployment(self, namespace=K8S_NAMESPACE, *args, **kwargs): - containers = kwargs.get("containers", []) - deploy_name = kwargs.get("name") - labels = kwargs.get("labels", {}) - labels.update({"app": deploy_name}) - container_pods = [] - for container in containers: - name = container.get("name") - image = container.get("image") - ports = container.get("ports", []) - environments = 
container.get("environments", []) - command = container.get("command", []) - command_args = container.get("command_args", []) - - environments = [ - client.V1EnvVar(name=env.get("name"), value=env.get("value")) - for env in environments - ] - ports = [ - client.V1ContainerPort(container_port=port) for port in ports - ] - container_pods.append( - client.V1Container( - name=name, - image=image, - env=environments, - command=command, - args=command_args, - ports=ports, - image_pull_policy="IfNotPresent", - ) - ) - deployment_metadata = client.V1ObjectMeta(name=deploy_name) - pod_spec = client.V1PodSpec(containers=container_pods) - spec_metadata = client.V1ObjectMeta(labels=labels) - template_spec = client.V1PodTemplateSpec( - metadata=spec_metadata, spec=pod_spec - ) - spec = client.ExtensionsV1beta1DeploymentSpec(template=template_spec) - body = client.ExtensionsV1beta1Deployment( - api_version="extensions/v1beta1", - kind="Deployment", - metadata=deployment_metadata, - spec=spec, - ) - - api_instance = client.ExtensionsV1beta1Api() - - try: - api_instance.create_namespaced_deployment( - namespace=namespace, body=body, pretty="true" - ) - except ApiException as e: - LOG.error("Exception when call AppsV1beta1Api: %s", e) - raise e - - return True - - def create_service( - self, - namespace=K8S_NAMESPACE, - name=None, - selector=None, - ports=None, - service_type="ClusterIP", - ): - if selector is None: - selector = {} - if ports is None: - ports = [] - - metadata = client.V1ObjectMeta(name=name, labels={"app": name}) - ports = [client.V1ServicePort(port=port) for port in ports] - spec = client.V1ServiceSpec( - ports=ports, selector=selector, type=service_type - ) - body = client.V1Service( - metadata=metadata, spec=spec, kind="Service", api_version="v1" - ) - - api_instance = client.CoreV1Api() - try: - response = api_instance.create_namespaced_service(namespace, body) - except ApiException as e: - LOG.error("Exception when call CoreV1Api: %s", e) - raise e - - return 
True, response - - def create_ingress( - self, - namespace=K8S_NAMESPACE, - name=None, - service_name=None, - ingress_paths=None, - annotations=None, - ): - if ingress_paths is None: - ingress_paths = [] - if annotations is None: - annotations = {} - - api_instance = client.ExtensionsV1beta1Api() - metadata = client.V1ObjectMeta(name=name, annotations=annotations) - path_list = [] - for ing_path in ingress_paths: - ing_backend = client.V1beta1IngressBackend( - service_name=service_name, service_port=ing_path.get("port", 0) - ) - path_list.append( - client.V1beta1HTTPIngressPath( - path=ing_path.get("path", ""), backend=ing_backend - ) - ) - http_dict = client.V1beta1HTTPIngressRuleValue(paths=path_list) - rule_list = [client.V1beta1IngressRule(http=http_dict, host="")] - ingress_spec = client.V1beta1IngressSpec(rules=rule_list) - body = client.V1beta1Ingress( - api_version="extensions/v1beta1", - metadata=metadata, - spec=ingress_spec, - kind="Ingress", - ) - - try: - api_instance.create_namespaced_ingress( - namespace=namespace, body=body, pretty="true" - ) - except ApiException as e: - LOG.error("Create ingress failed %s", e) - raise e - - return True - - def delete_deployment(self, namespace=K8S_NAMESPACE, name=None): - api_instance = client.ExtensionsV1beta1Api() - delete_options = client.V1DeleteOptions( - propagation_policy="Foreground" - ) - grace_period_seconds = 10 - - try: - api_instance.delete_namespaced_deployment( - name=name, - namespace=namespace, - body=delete_options, - grace_period_seconds=grace_period_seconds, - pretty="true", - ) - except ApiException as e: - LOG.error("Exception when call AppsV1beta1Api: %s", e) - - def delete_service(self, namespace=K8S_NAMESPACE, name=None): - api_instance = client.CoreV1Api() - - try: - api_instance.delete_namespaced_service( - name=name, namespace=namespace - ) - except ApiException as e: - LOG.error("Exception when call CoreV1Api: %s", e) - - def delete_ingress(self, namespace=K8S_NAMESPACE, name=None): - 
api_instance = client.ExtensionsV1beta1Api() - delete_options = client.V1DeleteOptions() - - try: - api_instance.delete_namespaced_ingress( - name=name, - namespace=namespace, - body=delete_options, - pretty="true", - ) - except ApiException as e: - LOG.error("Exception when call AppsV1beta1Api: %s\n" % e) diff --git a/src/api-engine/api/lib/agent/kubernetes/fabric/__init__.py b/src/api-engine/api/lib/agent/kubernetes/fabric/__init__.py deleted file mode 100644 index c24920151..000000000 --- a/src/api-engine/api/lib/agent/kubernetes/fabric/__init__.py +++ /dev/null @@ -1,89 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import logging - -from api.lib.agent.network_base import NetworkBase -from api.common.enums import FabricNodeType -from api.utils.port_picker import find_available_ports, set_ports_mapping - -LOG = logging.getLogger(__name__) - -CA_IMAGE_NAME = "hyperledger/fabric-ca" - - -class FabricNetwork(NetworkBase): - def __init__(self, *args, **kwargs): - super(FabricNetwork, self).__init__(*args, **kwargs) - - self._version = kwargs.get("version") - self._type = kwargs.get("node_type") - self._agent_id = kwargs.get("agent_id") - self._node_id = kwargs.get("node_id") - - def _generate_deployment(self): - containers = [] - name = str(self._node_id) - name = "deploy-%s" % name - if self._type == FabricNodeType.Ca.name.lower(): - image = "%s:%s" % (CA_IMAGE_NAME, self._version) - environments = [ - { - "name": "FABRIC_CA_HOME", - "value": "/etc/hyperledger/fabric-ca-server", - } - ] - ports = [7054] - command = ["fabric-ca-server"] - command_args = ["start", "-b", "admin:adminpw", "-d"] - containers.append( - { - "image": image, - "environments": environments, - "name": "ca", - "ports": ports, - "command": command, - "command_args": command_args, - } - ) - return {"containers": containers, "name": name} - - def _generate_service(self): - name = str(self._node_id) - deploy_name = "deploy-%s" % name - service_name = "service-%s" % name - ports = [] - if 
self._type == FabricNodeType.Ca.name.lower(): - ports = [7054] - - return { - "name": service_name, - "ports": ports, - "selector": {"app": deploy_name}, - "service_type": "NodePort", - } - - def _generate_ingress(self): - name = str(self._node_id) - service_name = "service-%s" % name - ingress_name = "ingress-%s" % name - ingress_paths = [] - annotations = {"nginx.ingress.kubernetes.io/ssl-redirect": "false"} - if self._type == FabricNodeType.Ca.name.lower(): - ingress_paths = [{"port": 7054, "path": "/%s" % name}] - - return { - "name": ingress_name, - "service_name": service_name, - "ingress_paths": ingress_paths, - "annotations": annotations, - } - - def generate_config(self, *args, **kwargs): - config = { - "deployment": self._generate_deployment(), - "service": self._generate_service(), - # "ingress": self._generate_ingress(), - } - - return config diff --git a/src/api-engine/api/lib/agent/network_base.py b/src/api-engine/api/lib/agent/network_base.py deleted file mode 100644 index 4a03fed8e..000000000 --- a/src/api-engine/api/lib/agent/network_base.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import abc - - -class NetworkBase(object): - __metaclass__ = abc.ABCMeta - - def __init__(self, *args, **kwargs): - pass - - @abc.abstractmethod - def generate_config(self, *args, **kwargs): - pass diff --git a/src/api-engine/api/lib/configtxgen/__init__.py b/src/api-engine/api/lib/configtxgen/__init__.py deleted file mode 100644 index 14f2016a7..000000000 --- a/src/api-engine/api/lib/configtxgen/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from .configtx import ConfigTX -from .configtxgen import ConfigTxGen diff --git a/src/api-engine/api/lib/configtxgen/configtx.py b/src/api-engine/api/lib/configtxgen/configtx.py deleted file mode 100644 index 742f83183..000000000 --- a/src/api-engine/api/lib/configtxgen/configtx.py +++ /dev/null @@ -1,272 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# 
-import yaml -import os -from copy import deepcopy -from api.config import CELLO_HOME - - -def load_configtx(filepath): - with open(filepath, "r", encoding="utf-8") as f: - return yaml.load(f, Loader=yaml.FullLoader) - - -class ConfigTX: - """Class represents crypto-config yaml.""" - - def __init__( - self, - network, - filepath=CELLO_HOME, - orderer=None, - raft_option=None, - template_path="/opt/config/configtx.yaml", - ): - """init ConfigTX - param: - network: network's name - orderer: configuration of output block - raft_option: configuration of raft - filepath: cello's working directory - return: - """ - self.filepath = filepath - self.network = network - self.template = load_configtx(template_path) - - def create( - self, - name, - consensus, - orderers, - peers, - orderer_cfg=None, - application=None, - option=None, - ): - """create the configtx.yaml - param: - consensus:consensus - orderers:the list of orderer - peers: the list of peer - orderer_cfg: the config of orderer - application: application - option: option - return: - """ - OrdererDefaults = self.template["Orderer"] - ChannelDefaults = self.template["Channel"] - ApplicationDefaults = self.template["Application"] - ChannelCapabilities = self.template["Capabilities"]["Channel"] - OrdererCapabilities = self.template["Capabilities"]["Orderer"] - ApplicationCapabilities = self.template["Capabilities"]["Application"] - - OrdererOrganizations = [] - OrdererAddress = [] - Consenters = [] - - for orderer in orderers: - OrdererMSP = "OrdererMSP" - OrdererOrg = dict( - Name="Orderer", - ID=OrdererMSP, - MSPDir="{}/{}/crypto-config/ordererOrganizations/{}/msp".format( - self.filepath, - orderer["name"], - orderer["name"].split(".", 1)[1], - ), - Policies=dict( - Readers=dict( - Type="Signature", - Rule="OR('{}.member')".format(OrdererMSP), - ), - Writers=dict( - Type="Signature", - Rule="OR('{}.member')".format(OrdererMSP), - ), - Admins=dict( - Type="Signature", - Rule="OR('{}.admin')".format(OrdererMSP), - 
), - ), - ) - for host in orderer["hosts"]: - OrdererAddress.append( - "{}.{}:{}".format( - host["name"], orderer["name"].split(".", 1)[1], 7050 - ) - ) - Consenters.append( - dict( - Host="{}.{}".format( - host["name"], orderer["name"].split(".", 1)[1] - ), - Port=7050, - ClientTLSCert="{}/{}/crypto-config/ordererOrganizations/{}/orderers/{}.{}/tls/server.crt".format( - self.filepath, - orderer["name"], - orderer["name"].split(".", 1)[1], - host["name"], - orderer["name"].split(".", 1)[1], - ), - ServerTLSCert="{}/{}/crypto-config/ordererOrganizations/{}/orderers/{}.{}/tls/server.crt".format( - self.filepath, - orderer["name"], - orderer["name"].split(".", 1)[1], - host["name"], - orderer["name"].split(".", 1)[1], - ), - ) - ) - OrdererOrg["OrdererEndpoints"] = deepcopy(OrdererAddress) - OrdererOrganizations.append(OrdererOrg) - - PeerOrganizations = [] - - for peer in peers: - PeerMSP = peer["name"].split(".", 1)[0].capitalize() + "MSP" - PeerOrganizations.append( - dict( - Name=peer["name"].split(".", 1)[0].capitalize(), - ID=PeerMSP, - MSPDir="{}/{}/crypto-config/peerOrganizations/{}/msp".format( - self.filepath, peer["name"], peer["name"] - ), - Policies=dict( - Readers=dict( - Type="Signature", - Rule="OR('{}.admin', '{}.peer', '{}.client')".format( - PeerMSP, PeerMSP, PeerMSP - ), - ), - Writers=dict( - Type="Signature", - Rule="OR('{}.admin', '{}.client')".format( - PeerMSP, PeerMSP - ), - ), - Admins=dict( - Type="Signature", - Rule="OR('{}.admin')".format(PeerMSP), - ), - Endorsement=dict( - Type="Signature", - Rule="OR('{}.peer')".format(PeerMSP), - ), - ), - ) - ) - Organizations = OrdererOrganizations + PeerOrganizations - Capabilities = dict( - Channel=ChannelCapabilities, - Orderer=OrdererCapabilities, - Application=ApplicationCapabilities, - ) - Application = deepcopy(ApplicationDefaults) - Orderer = deepcopy(OrdererDefaults) - Orderer["Addresses"] = deepcopy(OrdererAddress) - Channel = deepcopy(ChannelDefaults) - Application["Capabilities"] = 
Capabilities["Application"] - Channel["Capabilities"] = Capabilities["Channel"] - Orderer["Capabilities"] = Capabilities["Orderer"] - Orderer["OrdererType"] = consensus - Orderer["EtcdRaft"]["Consenters"] = deepcopy(Consenters) - - Profiles = {} - Profiles[name] = deepcopy(Channel) - Profiles[name]["Orderer"] = deepcopy(Orderer) - Profiles[name]["Application"] = deepcopy(Application) - Profiles[name]["Capabilities"] = Capabilities["Channel"] - Profiles[name]["Orderer"]["Capabilities"] = Capabilities["Orderer"] - Profiles[name]["Application"]["Capabilities"] = Capabilities[ - "Application" - ] - Profiles[name]["Orderer"]["Organizations"] = OrdererOrganizations - Profiles[name]["Application"]["Organizations"] = PeerOrganizations - - configtx = dict( - Organizations=Organizations, - Capabilities=Capabilities, - Application=Application, - Orderer=Orderer, - Channel=Channel, - Profiles=Profiles, - ) - os.system("mkdir -p {}/{}".format(self.filepath, self.network)) - - with open( - "{}/{}/configtx.yaml".format(self.filepath, self.network), - "w", - encoding="utf-8", - ) as f: - yaml.dump(configtx, f, sort_keys=False) - - def createChannel(self, name, organizations): - """create the channel.tx - param: - name: name of channel - organizations: Organizations ready to join the channel - return: - """ - try: - with open( - "{}/{}/{}".format( - self.filepath, self.network, "configtx.yaml" - ), - "r+", - encoding="utf-8", - ) as f: - configtx = yaml.load(f, Loader=yaml.FullLoader) - Profiles = configtx["Profiles"] - Channel = configtx["Channel"] - Orderer = configtx["Orderer"] - Application = configtx["Application"] - PeerOrganizations = [] - for org in configtx["Organizations"]: - for item in organizations: - if org["ID"] == item.capitalize() + "MSP": - PeerOrganizations.append(org) - if PeerOrganizations == []: - raise Exception("can't find organnization") - Profiles[name] = deepcopy(Channel) - Profiles[name]["Orderer"] = Orderer - Profiles[name]["Application"] = Application 
- - with open( - "{}/{}/{}".format( - self.filepath, self.network, "configtx.yaml" - ), - "w", - encoding="utf-8", - ) as f: - yaml.safe_dump(configtx, f, sort_keys=False) - - except Exception as e: - err_msg = "Configtx create channel failed for {}!".format(e) - raise Exception(err_msg) - - -if __name__ == "__main__": - orderers = [ - { - "name": "org1.cello.com", - "hosts": [{"name": "orderer1", "port": 8051}], - } - ] - # peers = [{"name": "org1.cello.com", "hosts": [{"name": "foo", "port": 7051},{"name": "car", "port": 7052}]}, - # {"name": "org2.cello.com", "hosts": [{"name": "zoo", "port": 7053}]}] - peers = [ - { - "name": "org1.cello.com", - "hosts": [ - {"name": "foo", "port": 7051}, - {"name": "car", "port": 7052}, - ], - } - ] - ConfigTX("test3").create( - consensus="etcdraft", orderers=orderers, peers=peers - ) - # tx = ConfigTX("test3") - # print(tx.template) diff --git a/src/api-engine/api/lib/configtxgen/configtxgen.py b/src/api-engine/api/lib/configtxgen/configtxgen.py deleted file mode 100644 index d9668e0b8..000000000 --- a/src/api-engine/api/lib/configtxgen/configtxgen.py +++ /dev/null @@ -1,76 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from api.config import CELLO_HOME, FABRIC_TOOL, FABRIC_VERSION - -import subprocess -import logging - -LOG = logging.getLogger(__name__) - - -class ConfigTxGen: - """Class represents cryptotxgen.""" - - def __init__( - self, - network, - filepath=CELLO_HOME, - configtxgen=FABRIC_TOOL, - version=FABRIC_VERSION, - ): - """init CryptoGen - param: - network: network's name - configtxgen: tool path - version: version - filepath: cello's working directory - return: - """ - self.network = network - self.configtxgen = configtxgen + "/configtxgen" - self.filepath = filepath - self.version = version - - def genesis(self, profile="", channelid="", outputblock="genesis.block"): - """generate gensis - param: - profile: profile - channelid: channelid - outputblock: outputblock - return: - """ - try: - command = [ - 
self.configtxgen, - "-configPath", - "{}/{}/".format(self.filepath, self.network), - "-profile", - "{}".format(profile), - "-outputBlock", - "{}/{}/{}".format(self.filepath, self.network, outputblock), - "-channelID", - "{}".format(channelid), - ] - - LOG.info(" ".join(command)) - - subprocess.run(command, check=True) - - except subprocess.CalledProcessError as e: - err_msg = "configtxgen genesis fail! " - raise Exception(err_msg + str(e)) - - except Exception as e: - err_msg = "configtxgen genesis fail! " - raise Exception(err_msg + str(e)) - - def anchorpeer(self, profile, channelid, outputblock): - """set anchorpeer - param: - profile: profile - channelid: channelid - outputblock: outputblock - return: - """ - pass diff --git a/src/api-engine/api/lib/configtxlator/configtxlator.py b/src/api-engine/api/lib/configtxlator/configtxlator.py deleted file mode 100644 index ec444db19..000000000 --- a/src/api-engine/api/lib/configtxlator/configtxlator.py +++ /dev/null @@ -1,99 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from subprocess import call, run -from api.config import FABRIC_TOOL, FABRIC_VERSION - -import logging - -LOG = logging.getLogger(__name__) - - -class ConfigTxLator: - """ - Class represents configtxlator CLI. - """ - - def __init__(self, configtxlator=FABRIC_TOOL, version=FABRIC_VERSION): - self.configtxlator = configtxlator + "/configtxlator" - self.version = version - - def proto_encode(self, input, type, output): - """ - Converts a JSON document to protobuf. - - params: - input: A file containing the JSON document. - type: The type of protobuf structure to encode to. For example, 'common.Config'. - output: A file to write the output to. - """ - try: - command = [ - self.configtxlator, - "proto_encode", - "--input={}".format(input), - "--type={}".format(type), - "--output={}".format(output), - ] - - LOG.info(" ".join(command)) - - call(command) - except Exception as e: - err_msg = "configtxlator proto decode fail! 
" - raise Exception(err_msg + str(e)) - - def proto_decode(self, input, type, output): - """ - Converts a proto message to JSON. - - params: - input: A file containing the JSON document. - type: The type of protobuf structure to decode to. For example, 'common.Config'. - return: - config - """ - try: - command = [ - self.configtxlator, - "proto_decode", - "--type={}".format(type), - "--input={}".format(input), - "--output={}".format(output), - ] - - LOG.info(" ".join(command)) - - call(command) - - except Exception as e: - err_msg = "configtxlator proto decode fail! " - raise Exception(err_msg + str(e)) - - def compute_update(self, original, updated, channel_id, output): - """ - Takes two marshaled common.Config messages and computes the config update which - transitions between the two. - - params: - original: The original config message. - updated: The updated config message. - channel_id: The name of the channel for this update. - output: A file to write the JSON document to. - """ - try: - command = [ - self.configtxlator, - "compute_update", - "--original={}".format(original), - "--updated={}".format(updated), - "--channel_id={}".format(channel_id), - "--output={}".format(output), - ] - - LOG.info(" ".join(command)) - - call(command) - except Exception as e: - err_msg = "configtxlator compute update fail! 
" - raise Exception(err_msg + str(e)) diff --git a/src/api-engine/api/lib/peer/chaincode.py b/src/api-engine/api/lib/peer/chaincode.py deleted file mode 100644 index f5ab6ca64..000000000 --- a/src/api-engine/api/lib/peer/chaincode.py +++ /dev/null @@ -1,631 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import os -import json -import subprocess -from api.lib.peer.command import Command -from api.config import FABRIC_TOOL, FABRIC_CFG, FABRIC_VERSION -import logging - -LOG = logging.getLogger(__name__) - - -class ChainCode(Command): - def __init__(self, version=FABRIC_VERSION, peer=FABRIC_TOOL, **kwargs): - self.peer = peer + "/peer" - super(ChainCode, self).__init__(version, **kwargs) - - def lifecycle_package(self, cc_name, cc_version, cc_path, language): - """ - package the chaincode to a tar.gz file. - :param cc_name: chaincode name - :param cc_version: chaincode version - :param cc_path: where the chaincode is - :param language: Chain code development language, default: golang - :return 0 means success. - """ - try: - label = cc_name + "_" + cc_version - res = os.system( - "{} lifecycle chaincode package {}.tar.gz --path {} --lang {} --label {}".format( - self.peer, cc_name, cc_path, language, label - ) - ) - res = res >> 8 - except Exception as e: - err_msg = "package chaincode failed for {}!".format(e) - raise Exception(err_msg) - return res - - def lifecycle_install(self, cc_targz): - """ - install the chaincode to peer. - :param cc_targz: chaincode name wich accessible path - :return: 0 means success. - """ - try: - command = [ - self.peer, - "lifecycle", - "chaincode", - "install", - cc_targz, - ] - LOG.info(" ".join(command)) - res = os.system(" ".join(command)) - res = res >> 8 - except Exception as e: - err_msg = "install chaincode failed for {}!".format(e) - raise Exception(err_msg) - return res - - def lifecycle_query_installed(self, timeout): - """ - get the chaincode info installed in peer. 
- :param timeout: - :return: res 0 means success - installed_chaincodes: the json format of installed_chaincodes info - """ - - try: - command = [ - self.peer, - "lifecycle", - "chaincode", - "queryinstalled", - "--output", - "json", - "--connTimeout", - timeout, - ] - LOG.info(" ".join(command)) - res = subprocess.Popen( - command, - shell=False, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - - stdout, stderr = res.communicate() - return_code = res.returncode - - if return_code == 0: - content = str(stdout, encoding="utf-8") - installed_chaincodes = json.loads(content) - else: - stderr = str(stderr, encoding="utf-8") - return return_code, stderr - except Exception as e: - err_msg = "query_installed chaincode info failed for {}!".format(e) - raise Exception(err_msg) - return return_code, installed_chaincodes - - def lifecycle_get_installed_package(self, timeout): - """ - lifecycle_query_installed will return a list installed in peer. - then execute cmd to get all chaincode with tar.gz format installed in peer. - :param timeout: - :return: res_return: 0 means success get all chaincode in peers. 
- """ - try: - res, installed = self.lifecycle_query_installed("3s") - res_return = 0 - if res == 0: - for item in installed["installed_chaincodes"]: - res_get = os.system( - "{} lifecycle chaincode getinstalledpackage --package-id {} " - "--output-directory {} --connTimeout {}".format( - self.peer, item["package_id"], FABRIC_CFG, timeout - ) - ) - res_get = res_get >> 8 - res_return = res_return or res_get - else: - print("package_id get failed.") - return 1, {} - except Exception as e: - err_msg = "get_installed_package failed for {}!".format(e) - raise Exception(err_msg) - return res_return - - def lifecycle_approve_for_my_org( - self, - orderer_url, - channel_name, - cc_name, - chaincode_version, - sequence, - policy, - init_flag, - ): - """ - The administrator can use the peer lifecycle chaincode approveformyorg subcommand to approve the chain code on - behalf of the organization. - :param orderer_url: orderer accessable url - :param channel_name: channel name - :param cc_name: chaincode name - :param chaincode_version: chaincode version - :param sequence: The channel chain code defines the serial number. The default value is 1 - :param policy: chaincode policy - :param init_flag: if the chaincode is first init. 
- :return: - """ - try: - res, installed = self.lifecycle_query_installed("3s") - cc_label = cc_name + "_" + chaincode_version - package_id = "" - for each in installed["installed_chaincodes"]: - if each["label"] == cc_label: - package_id = each["package_id"] - break - if package_id == "": - return ( - 1, - "not exist the chaincode, please check chaincode_name and chaincode_version", - ) - - command = [] - if ( - os.getenv("CORE_PEER_TLS_ENABLED") == "false" - or os.getenv("CORE_PEER_TLS_ENABLED") is None - ): - command = [ - self.peer, - "lifecycle", - "chaincode", - "approveformyorg", - "-o", - orderer_url, - "--channelID", - channel_name, - "--name", - cc_name, - "--version", - chaincode_version, - "--package-id", - package_id, - "--sequence", - str(sequence), - ] - else: - ORDERER_CA = os.getenv("ORDERER_CA") - command = [ - self.peer, - "lifecycle", - "chaincode", - "approveformyorg", - "-o", - orderer_url, - "--ordererTLSHostnameOverride", - orderer_url.split(":")[0], - "--channelID", - channel_name, - "--name", - cc_name, - "--version", - chaincode_version, - "--package-id", - package_id, - "--sequence", - str(sequence), - "--tls", - "--cafile", - ORDERER_CA, - ] - - if init_flag: - command.append("--init-required") - if policy: - command.append("--signature-policy") - command.append(policy) - - LOG.info(" ".join(command)) - res = subprocess.Popen( - command, - shell=False, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - stdout, stderr = res.communicate() - return_code = res.returncode - - if return_code == 0: - content = str(stdout, encoding="utf-8") - else: - stderr = str(stderr, encoding="utf-8") - return return_code, stderr - except Exception as e: - err_msg = "lifecycle_approve_for_my_org failed for {}!".format(e) - raise Exception(err_msg) - return return_code, content - - def lifecycle_query_approved(self, channel_name, cc_name): - """ - query_approved chaincode information. 
- :param channel_name: channel name - :param cc_name: chaincode name - :return: - """ - - try: - res = subprocess.Popen( - "{} lifecycle chaincode queryapproved --output json --channelID {}" - " --name {}".format(self.peer, channel_name, cc_name), - shell=True, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - stdout, stderr = res.communicate() - return_code = res.returncode - if return_code == 0: - content = str(stdout, encoding="utf-8") - chaincodes_info = json.loads(content) - else: - stderr = str(stderr, encoding="utf-8") - return return_code, stderr - except Exception as e: - err_msg = "lifecycle_query_approved failed for {}!".format(e) - raise Exception(err_msg) - - return return_code, chaincodes_info - - def lifecycle_check_commit_readiness( - self, channel_name, cc_name, cc_version, sequence=1 - ): - """ - :param channel_name:channel name - :param cc_name: chaincode name - :param cc_version: chaincode version - :param sequence:The channel chain code defines the serial number. 
The default value is 1 - :return: - """ - try: - ORDERER_CA = os.getenv("ORDERER_CA") - command = [] - if ( - os.getenv("CORE_PEER_TLS_ENABLED") == "false" - or os.getenv("CORE_PEER_TLS_ENABLED") is None - ): - command = [ - self.peer, - "lifecycle", - "chaincode", - "checkcommitreadiness", - "--channelID", - channel_name, - "--name", - cc_name, - "--version", - cc_version, - "--sequence", - str(sequence), - "--output", - "json", - ] - else: - command = [ - self.peer, - "lifecycle", - "chaincode", - "checkcommitreadiness", - "--channelID", - channel_name, - "--name", - cc_name, - "--version", - cc_version, - "--sequence", - str(sequence), - "--tls", - "--cafile", - ORDERER_CA, - "--output", - "json", - ] - - LOG.info(" ".join(command)) - - res = subprocess.Popen( - command, - shell=False, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - stdout, stderr = res.communicate() - return_code = res.returncode - if return_code == 0: - content = str(stdout, encoding="utf-8") - chaincodes_info = json.loads(content) - return return_code, chaincodes_info - else: - stderr = str(stderr, encoding="utf-8") - return return_code, stderr - except Exception as e: - err_msg = "lifecycle_check_commit_readiness failed for {}!".format( - e - ) - raise Exception(err_msg) - - def lifecycle_commit( - self, - orderer_url, - channel_name, - cc_name, - chaincode_version, - sequence, - policy, - peer_list=[], - peer_root_certs=[], - init_flag=False, - ): - """ - The administrator can submit the chain code definition to the specified channel by using the peer lifecycle - chain code commit subcommand - :param orderer_url: orderer accessable url - :param channel_name:channel name - :param cc_name:chaincode name - :param chaincode_version:chaincode version - :param sequence:The channel chain code defines the serial number. 
The default value is 1 - :param policy:chaincode policy - :param peer_list: the list of peerAddress - :param peer_root_certs: the list of peer_root_certs, the orderer should be same as peerlist's. - :param init_flag:if the chaincode is first init. - :return: - """ - try: - command = [] - if ( - os.getenv("CORE_PEER_TLS_ENABLED") == "false" - or os.getenv("CORE_PEER_TLS_ENABLED") is None - ): - command = [ - self.peer, - "lifecycle", - "chaincode", - "commit", - "-o", - orderer_url, - "--channelID", - channel_name, - "--name", - cc_name, - "--version", - chaincode_version, - "--sequence", - str(sequence), - ] - else: - ORDERER_CA = os.getenv("ORDERER_CA") - command = [ - self.peer, - "lifecycle", - "chaincode", - "commit", - "-o", - orderer_url, - "--ordererTLSHostnameOverride", - orderer_url.split(":")[0], - "--channelID", - channel_name, - "--name", - cc_name, - "--version", - chaincode_version, - "--sequence", - str(sequence), - "--tls", - "--cafile", - ORDERER_CA, - ] - - for i in range(len(peer_list)): - command.append("--peerAddresses") - command.append(peer_list[i]) - command.append("--tlsRootCertFiles") - command.append(peer_root_certs[i]) - - if init_flag: - command.append("--init-required") - if policy: - command.append("--signature-policy") - command.append(policy) - - LOG.info(" ".join(command)) - res = os.system(" ".join(command)) - res = res >> 8 - return res - - except Exception as e: - err_msg = "lifecycle_commit failed for {}!".format(e) - raise Exception(err_msg) - - def lifecycle_query_committed(self, channel_name, cc_name): - """ - - :param channel_name:channel name - :param cc_name:chaincode name - :return: chaincodes info has commited in channel of the cc_name - """ - try: - command = [ - self.peer, - "lifecycle", - "chaincode", - "querycommitted", - "--channelID", - channel_name, - "--output", - "json", - "--name", - cc_name, - ] - LOG.info(" ".join(command)) - res = subprocess.Popen( - command, - shell=False, - stdout=subprocess.PIPE, - 
stderr=subprocess.PIPE, - ) - stdout, stderr = res.communicate() - return_code = res.returncode - if return_code == 0: - content = str(stdout, encoding="utf-8") - chaincodes_commited = json.loads(content) - return return_code, chaincodes_commited - else: - stderr = str(stderr, encoding="utf-8") - return return_code, stderr - except Exception as e: - err_msg = "lifecycle_query_committed failed for {}!".format(e) - raise Exception(err_msg) - - def invoke( - self, - orderer_url, - orderer_tls_rootcert, - channel_name, - cc_name, - args, - init=False, - ): - """ - :param orderer_url:orderer accessable url - :param orderer_tls_rootcert: orderer tls certificate - :param channel_name: channel name - :param cc_name: chaincode name - :param args: args to invoke - :param init: if the chaincode is first init. - :return: - if success: 0, '' - else: 1, stderr - """ - try: - if init: - invoke_command = "{} chaincode invoke -I -o {} --channelID {} --name {} -c '{}'" - invoke_command_tls = "{} chaincode invoke -I -o {} --tls --cafile {} --channelID {} --name {} -c '{}'" - else: - invoke_command = "{} chaincode invoke -o {} --channelID {} --name {} -c '{}'" - invoke_command_tls = "{} chaincode invoke -o {} --tls --cafile {} --channelID {} --name {} -c '{}'" - - if ( - os.getenv("CORE_PEER_TLS_ENABLED") == "false" - or os.getenv("CORE_PEER_TLS_ENABLED") is None - ): - res = subprocess.Popen( - invoke_command.format( - self.peer, orderer_url, channel_name, cc_name, args - ), - shell=True, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - _, stderr = res.communicate() - return_code = res.returncode - if return_code == 0: - return return_code, "" - else: - stderr = str(stderr, encoding="utf-8") - return return_code, stderr - else: - res = subprocess.Popen( - invoke_command_tls.format( - self.peer, - orderer_url, - orderer_tls_rootcert, - channel_name, - cc_name, - args, - ), - shell=True, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - _, stderr = res.communicate() - 
return_code = res.returncode - if return_code == 0: - return return_code, "" - else: - stderr = str(stderr, encoding="utf-8") - return return_code, stderr - except Exception as e: - err_msg = "invoke failed for {}!".format(e) - raise Exception(err_msg) - - def query( - self, orderer_url, orderer_tls_rootcert, channel_name, cc_name, args - ): - try: - if ( - os.getenv("CORE_PEER_TLS_ENABLED") == "false" - or os.getenv("CORE_PEER_TLS_ENABLED") is None - ): - res = subprocess.Popen( - "{} chaincode query -o {} --channelID {} --name {} -c '{}'".format( - self.peer, orderer_url, channel_name, cc_name, args - ), - shell=True, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - stdout, stderr = res.communicate() - return_code = res.returncode - if return_code == 0: - return return_code, "" - else: - stderr = str(stderr, encoding="utf-8") - return return_code, stderr - else: - res = subprocess.Popen( - "{} chaincode query -o {} --tls --cafile {} --channelID {}" - " --name {} -c '{}'".format( - self.peer, - orderer_url, - orderer_tls_rootcert, - channel_name, - cc_name, - args, - ), - shell=True, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - stdout, stderr = res.communicate() - return_code = res.returncode - if return_code == 0: - content = str(stdout, encoding="utf-8") - query_result = json.loads(content) - return return_code, query_result - else: - stderr = str(stderr, encoding="utf-8") - return return_code, stderr - except Exception as e: - err_msg = "query failed for {}!".format(e) - raise Exception(err_msg) - - def lifecycle_calculatepackageid(self, cc_path): - """ - calculate the chaincode packageid. 
- :param cc_path: where the chaincode package is - :return: calculated packageid - """ - try: - res = subprocess.Popen( - "{} lifecycle chaincode calculatepackageid {} ".format( - self.peer, cc_path - ), - shell=True, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - stdout, stderr = res.communicate() - return_code = res.returncode - if return_code == 0: - content = str(stdout, encoding="utf-8") - return return_code, content - else: - stderr = str(stderr, encoding="utf-8") - return return_code, stderr - except Exception as e: - err_msg = "calculated chaincode packageid failed for {}!".format(e) - raise Exception(err_msg) diff --git a/src/api-engine/api/lib/peer/channel.py b/src/api-engine/api/lib/peer/channel.py deleted file mode 100644 index b6acbb5bf..000000000 --- a/src/api-engine/api/lib/peer/channel.py +++ /dev/null @@ -1,281 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import os -import json -import subprocess -import time -from api.lib.peer.command import Command -from api.config import FABRIC_TOOL, FABRIC_VERSION -import logging - -LOG = logging.getLogger(__name__) - - -class Channel(Command): - """Call CMD to perform channel create, join and other related operations""" - - def __init__(self, version=FABRIC_VERSION, peer=FABRIC_TOOL, **kwargs): - self.peer = peer + "/peer" - self.osnadmin = peer + "/osnadmin" - super(Channel, self).__init__(version, **kwargs) - - def create(self, channel, orderer_admin_url, block_path, time_out="90s"): - try: - command = [] - - if ( - os.getenv("CORE_PEER_TLS_ENABLED") == "false" - or os.getenv("CORE_PEER_TLS_ENABLED") is None - ): - command = [ - self.osnadmin, - "channel", - "join", - "--channelID", - channel, - "--config-block", - block_path, - "-o", - orderer_admin_url, - ] - else: - ORDERER_CA = os.getenv("ORDERER_CA") - ORDERER_ADMIN_TLS_SIGN_CERT = os.getenv( - "ORDERER_ADMIN_TLS_SIGN_CERT" - ) - ORDERER_ADMIN_TLS_PRIVATE_KEY = os.getenv( - "ORDERER_ADMIN_TLS_PRIVATE_KEY" - ) - command = [ - 
self.osnadmin, - "channel", - "join", - "--channelID", - channel, - "--config-block", - block_path, - "-o", - orderer_admin_url, - "--ca-file", - ORDERER_CA, - "--client-cert", - ORDERER_ADMIN_TLS_SIGN_CERT, - "--client-key", - ORDERER_ADMIN_TLS_PRIVATE_KEY, - ] - - LOG.info(" ".join(command)) - - res = subprocess.run(command, check=True) - - except subprocess.CalledProcessError as e: - err_msg = "create channel failed for {}!".format(e) - raise Exception(err_msg + str(e)) - - except Exception as e: - err_msg = "create channel failed for {}!".format(e) - raise Exception(err_msg) - return res - - def list(self): - try: - res = subprocess.Popen( - "{} channel list".format(self.peer), - shell=True, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - - stdout, stderr = res.communicate() - return_code = res.returncode - - if return_code == 0: - content = str(stdout, encoding="utf-8") - content = content.split("\n") - else: - stderr = str(stderr, encoding="utf-8") - return return_code, stderr - except Exception as e: - err_msg = "get channel list failed for {}!".format(e) - raise Exception(err_msg) - return return_code, content[1:-1] - - def update(self, channel, channel_tx, orderer_url): - """ - Send a configtx update. - params: - channel: channel id. - channel_tx: Configuration transaction file generated by a tool such as configtxgen for submitting to orderer - orderer_url: Ordering service endpoint. 
- """ - try: - ORDERER_CA = os.getenv("ORDERER_CA") - - command = [ - self.peer, - "channel", - "update", - "-f", - channel_tx, - "-c", - channel, - "-o", - orderer_url, - "--ordererTLSHostnameOverride", - orderer_url.split(":")[0], - "--tls", - "--cafile", - ORDERER_CA, - ] - LOG.info(" ".join(command)) - - res = subprocess.run(command, check=True) - - except Exception as e: - err_msg = "update channel failed for {}!".format(e) - raise Exception(err_msg) - return res - - def fetch( - self, - block_path, - channel, - orderer_general_url, - max_retries=5, - retry_interval=1, - ): - """ - Fetch a specified block, writing it to a file e.g. .block. - params: - option: block option newest|oldest|config|(block number). - channel: channel id. - """ - res = 0 - command = [] - if ( - os.getenv("CORE_PEER_TLS_ENABLED") == "false" - or os.getenv("CORE_PEER_TLS_ENABLED") is None - ): - command = [ - self.peer, - "channel", - "fetch", - "config", - block_path, - "-o", - orderer_general_url, - "-c", - channel, - ] - else: - ORDERER_CA = os.getenv("ORDERER_CA") - orderer_address = orderer_general_url.split(":")[0] - command = [ - self.peer, - "channel", - "fetch", - "config", - block_path, - "-o", - orderer_general_url, - "--ordererTLSHostnameOverride", - orderer_address, - "-c", - channel, - "--tls", - "--cafile", - ORDERER_CA, - ] - - LOG.info(" ".join(command)) - - # Retry fetching the block up to max_retries times - for attempt in range(1, max_retries + 1): - try: - LOG.debug("Attempt %d/%d to fetch block", attempt, max_retries) - - res = subprocess.run(command, check=True) - - LOG.info("Successfully fetched block") - break - - except subprocess.CalledProcessError as e: - LOG.debug(f"Attempt {attempt}/{max_retries} failed") - - if attempt <= max_retries: - time.sleep(retry_interval) - else: - LOG.error( - f"Failed to fetch block after {max_retries} attempts" - ) - raise e - - return res - - def signconfigtx(self, channel_tx): - """ - Signs a configtx update. 
- params: - channel_tx: Configuration transaction file generated by a tool such as configtxgen for submitting to orderer - """ - try: - res = os.system( - "{} channel signconfigtx -f {}".format(self.peer, channel_tx) - ) - except Exception as e: - err_msg = "signs a configtx update failed {}".format(e) - raise Exception(err_msg) - res = res >> 8 - return res - - def join(self, block_path): - """ - Joins the peer to a channel. - params: - block_path: Path to file containing genesis block. - """ - try: - command = "{} channel join -b {} ".format(self.peer, block_path) - - LOG.info(f"{command}") - - res = os.system(command) - - except Exception as e: - err_msg = "join the peer to a channel failed. {}".format(e) - raise Exception(err_msg) - res = res >> 8 - return res - - def getinfo(self, channel): - """ - Get blockchain information of a specified channel. - params: - channel: In case of a newChain command, the channel ID to create. - """ - try: - res = subprocess.Popen( - "{} channel getinfo -c {}".format(self.peer, channel), - shell=True, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - - stdout, stderr = res.communicate() - return_code = res.returncode - - if return_code == 0: - content = str(stdout, encoding="utf-8") - content = content.split("\n")[0].split(":", 1)[1] - block_info = json.loads(content) - body = {"block_info": block_info} - else: - stderr = str(stderr, encoding="utf-8") - return return_code, stderr - except Exception as e: - err_msg = "get blockchain information of a specified channel failed. 
{}".format( - e - ) - raise Exception(err_msg) - return return_code, body diff --git a/src/api-engine/api/lib/peer/command.py b/src/api-engine/api/lib/peer/command.py deleted file mode 100644 index 769c7aead..000000000 --- a/src/api-engine/api/lib/peer/command.py +++ /dev/null @@ -1,17 +0,0 @@ -import os -from api.config import FABRIC_CFG - - -# Command class reads local environment variables by given their names -class Command: - def __init__(self, version, **kwargs): - self.version = version - # Setting environment variables according to user input. Recommended main settings: CORE_PEER_LOCALMSPID、 - # CORE_PEER_TLS_CERT_FILE、 CORE_PEER_TLS_KEY_FILE、CORE_PEER_TLS_ROOTCERT_FILE、CORE_PEER_MSPCONFIGPATH, - # CORE_PEER_MSPCONFIGPATH、 CORE_PEER_TLS_ROOTCERT_FILE,CORE_PEER_ADDRESS and so on. - - # Please put the config configuration file of the fabric binary in the /opt/node directory - os.environ["FABRIC_CFG_PATH"] = FABRIC_CFG - # os.environ["CORE_PEER_TLS_ENABLED"] = "true" - for k, v in kwargs.items(): - os.environ[k] = v diff --git a/src/api-engine/api/lib/pki/cryptogen/cryptocfg.py b/src/api-engine/api/lib/pki/cryptogen/cryptocfg.py index 25ba0e383..c0efe8897 100644 --- a/src/api-engine/api/lib/pki/cryptogen/cryptocfg.py +++ b/src/api-engine/api/lib/pki/cryptogen/cryptocfg.py @@ -3,7 +3,8 @@ # import yaml import os -from api.config import CELLO_HOME + +from api_engine.settings import CELLO_HOME class CryptoConfig: @@ -38,61 +39,47 @@ def __init__( self.enablenodeous = enablenodeous self.file = file - def create(self, peernum, orderernum) -> None: + def create(self) -> None: """create the crypto-config.yaml param return: """ try: - network = {} - for item in ["Peer", "Orderer"]: - org = [] - ca = dict( - Country=self.country, - Locality=self.locality, - Province=self.province, - ) - specs = [] - # for host in org_info["Specs"]: - # specs.append(dict(Hostname=host)) - - if item == "Peer": - template = dict(Count=peernum) - users = dict(Count=1) - org.append( - dict( 
- Domain=self.name, - Name=self.name.split(".")[0].capitalize(), - CA=ca, - Specs=specs, - EnableNodeOUs=self.enablenodeous, - Template=template, - Users=users, - ) - ) - network = {"PeerOrgs": org} - else: - template = dict(Count=orderernum) - org.append( - dict( - Domain=self.name.split(".", 1)[1], - Name=item, - CA=ca, - Specs=specs, - EnableNodeOUs=self.enablenodeous, - Template=template, - ) - ) - network["OrdererOrgs"] = org - - os.system("mkdir -p {}/{}".format(self.filepath, self.name)) + org_filepath = os.path.join(self.filepath, self.name) + os.makedirs(org_filepath, exist_ok=True) with open( - "{}/{}/{}".format(self.filepath, self.name, self.file), - "w", - encoding="utf-8", + os.path.join(org_filepath, self.file), + "w", + encoding="utf-8", ) as f: - yaml.dump(network, f) + yaml.dump({ + "PeerOrgs": [dict( + Domain=self.name, + Name=self.name.split(".")[0].capitalize(), + CA=dict( + Country=self.country, + Locality=self.locality, + Province=self.province, + ), + Specs=[], + EnableNodeOUs=self.enablenodeous, + Template=dict(Count=0), + Users=dict(Count=1), + )], + "OrdererOrgs": [dict( + Domain=self.name.split(".", 1)[1], + Name="Orderer", + CA=dict( + Country=self.country, + Locality=self.locality, + Province=self.province, + ), + Specs=[], + EnableNodeOUs=self.enablenodeous, + Template=dict(Count=0), + )] + }, f) except Exception as e: err_msg = "CryptoConfig create failed for {}!".format(e) raise Exception(err_msg) @@ -110,7 +97,7 @@ def update(self, org_info: any) -> None: encoding="utf-8", ) as f: network = yaml.load(f, Loader=yaml.FullLoader) - if org_info["type"] == "peer": + if org_info["type"].lower() == "peer": orgs = network["PeerOrgs"] else: orgs = network["OrdererOrgs"] @@ -119,7 +106,13 @@ def update(self, org_info: any) -> None: # org["Template"]["Count"] += 1 specs = org["Specs"] for host in org_info["Specs"]: - specs.append(dict(Hostname=host)) + host_exists = False + for spec in specs: + if spec["Hostname"] == host: + host_exists = True 
+ break + if not host_exists: + specs.append(dict(Hostname=host)) with open( "{}/{}/{}".format(self.filepath, self.name, self.file), diff --git a/src/api-engine/api/lib/pki/cryptogen/cryptogen.py b/src/api-engine/api/lib/pki/cryptogen/cryptogen.py index 96c0fad00..20cb8b1ed 100644 --- a/src/api-engine/api/lib/pki/cryptogen/cryptogen.py +++ b/src/api-engine/api/lib/pki/cryptogen/cryptogen.py @@ -1,11 +1,14 @@ # # SPDX-License-Identifier: Apache-2.0 # -from subprocess import call -from api.config import CELLO_HOME, FABRIC_TOOL, FABRIC_VERSION +import os +from subprocess import check_call + import logging +from api_engine.settings import CELLO_HOME, FABRIC_TOOL, FABRIC_VERSION + LOG = logging.getLogger(__name__) @@ -40,16 +43,17 @@ def generate(self, output="crypto-config", config="crypto-config.yaml"): return: """ try: + org_filepath = os.path.join(self.filepath, self.name) command = [ self.cryptogen, "generate", - "--output={}/{}/{}".format(self.filepath, self.name, output), - "--config={}/{}/{}".format(self.filepath, self.name, config), + "--output={}".format(os.path.join(org_filepath, output)), + "--config={}".format(os.path.join(org_filepath, config)), ] LOG.info(" ".join(command)) - call(command) + check_call(command) except Exception as e: err_msg = "cryptogen generate fail for {}!".format(e) @@ -72,7 +76,7 @@ def extend(self, input="crypto-config", config="crypto-config.yaml"): LOG.info(" ".join(command)) - call(command) + check_call(command) except Exception as e: err_msg = "cryptogen extend fail for {}!".format(e) diff --git a/src/api-engine/api/management/commands/create_user.py b/src/api-engine/api/management/commands/create_user.py deleted file mode 100644 index 4f5a98fbb..000000000 --- a/src/api-engine/api/management/commands/create_user.py +++ /dev/null @@ -1,58 +0,0 @@ -import logging - -from django.core.exceptions import ObjectDoesNotExist -from django.core.management.base import BaseCommand -from api.models import UserProfile - -LOG = 
logging.getLogger(__name__) - - -class Command(BaseCommand): - help = "Create user" - - def add_arguments(self, parser): - parser.add_argument("--username", help="Username", required=True) - parser.add_argument( - "--is_superuser", action="store_true", required=True - ) - parser.add_argument( - "--password", help="Password of new user", required=True - ) - parser.add_argument("--email", help="Email of new user", required=True) - parser.add_argument("--role", help="role of new user", required=True) - parser.add_argument( - "--force", - help="whether force create user", - required=False, - action="store_true", - ) - - def handle(self, *args, **options): - username = options.get("username") - password = options.get("password") - role = options.get("role") - email = options.get("email") - is_superuser = options.get("is_superuser", False) - force = options.get("force", False) - - try: - user = UserProfile.objects.get(email=email) - except ObjectDoesNotExist: - user = UserProfile( - username=username, - role=role, - email=email, - is_superuser=is_superuser, - ) - user.set_password(password) - user.save() - else: - if force: - user.username = username - user.role = role - user.is_superuser = is_superuser - user.set_password(password) - user.save() - self.stdout.write( - self.style.SUCCESS("Create user successfully %s" % user.id) - ) diff --git a/src/api-engine/api/migrations/0001_initial.py b/src/api-engine/api/migrations/0001_initial.py deleted file mode 100644 index fc151d931..000000000 --- a/src/api-engine/api/migrations/0001_initial.py +++ /dev/null @@ -1,1162 +0,0 @@ -# Generated by Django 4.2.16 on 2025-06-15 06:26 - -import api.models -import api.utils.common -from django.conf import settings -import django.contrib.auth.models -import django.contrib.postgres.fields -import django.core.validators -from django.db import migrations, models -import django.db.models.deletion -import django.utils.timezone - - -class Migration(migrations.Migration): - - initial = True - - 
dependencies = [ - ("auth", "0012_alter_user_first_name_max_length"), - ] - - operations = [ - migrations.CreateModel( - name="UserProfile", - fields=[ - ( - "password", - models.CharField(max_length=128, verbose_name="password"), - ), - ( - "last_login", - models.DateTimeField( - blank=True, null=True, verbose_name="last login" - ), - ), - ( - "is_superuser", - models.BooleanField( - default=False, - help_text="Designates that this user has all permissions without explicitly assigning them.", - verbose_name="superuser status", - ), - ), - ( - "first_name", - models.CharField( - blank=True, max_length=150, verbose_name="first name" - ), - ), - ( - "last_name", - models.CharField( - blank=True, max_length=150, verbose_name="last name" - ), - ), - ( - "is_staff", - models.BooleanField( - default=False, - help_text="Designates whether the user can log into this admin site.", - verbose_name="staff status", - ), - ), - ( - "is_active", - models.BooleanField( - default=True, - help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.", - verbose_name="active", - ), - ), - ( - "date_joined", - models.DateTimeField( - default=django.utils.timezone.now, - verbose_name="date joined", - ), - ), - ( - "id", - models.UUIDField( - default=api.utils.common.make_uuid, - help_text="ID of user", - primary_key=True, - serialize=False, - ), - ), - ( - "email", - models.EmailField( - db_index=True, max_length=254, unique=True - ), - ), - ( - "username", - models.CharField( - default="", help_text="Name of user", max_length=64 - ), - ), - ( - "role", - models.CharField( - choices=[ - ("admin", "Admin"), - ("operator", "Operator"), - ("user", "User"), - ], - default=2, - max_length=64, - ), - ), - ( - "groups", - models.ManyToManyField( - blank=True, - help_text="The groups this user belongs to. 
A user will get all permissions granted to each of their groups.", - related_name="user_set", - related_query_name="user", - to="auth.group", - verbose_name="groups", - ), - ), - ], - options={ - "verbose_name": "User Info", - "verbose_name_plural": "User Info", - "ordering": ["-date_joined"], - }, - managers=[ - ("objects", django.contrib.auth.models.UserManager()), - ], - ), - migrations.CreateModel( - name="Agent", - fields=[ - ( - "id", - models.UUIDField( - default=api.utils.common.make_uuid, - help_text="ID of agent", - primary_key=True, - serialize=False, - ), - ), - ( - "name", - models.CharField( - default="agent-0d4db641fedc458fa54e9f364e411698", - help_text="Agent name, can be generated automatically.", - max_length=64, - ), - ), - ( - "urls", - models.URLField( - blank=True, help_text="Agent URL", null=True - ), - ), - ( - "status", - models.CharField( - choices=[ - ("inactive", "Inactive"), - ("active", "Active"), - ], - default="active", - help_text="Status of agent", - max_length=10, - ), - ), - ( - "type", - models.CharField( - choices=[ - ("docker", "Docker"), - ("kubernetes", "Kubernetes"), - ], - default="docker", - help_text="Type of agent", - max_length=32, - ), - ), - ( - "config_file", - models.FileField( - blank=True, - help_text="Config file for agent", - max_length=256, - upload_to=api.models.get_agent_config_file_path, - ), - ), - ( - "created_at", - models.DateTimeField( - auto_now_add=True, help_text="Create time of agent" - ), - ), - ( - "free_ports", - django.contrib.postgres.fields.ArrayField( - base_field=models.IntegerField(blank=True), - help_text="Agent free ports.", - null=True, - size=None, - ), - ), - ], - options={ - "ordering": ("-created_at",), - }, - ), - migrations.CreateModel( - name="ChainCode", - fields=[ - ( - "id", - models.UUIDField( - default=api.utils.common.make_uuid, - editable=False, - help_text="ID of ChainCode", - primary_key=True, - serialize=False, - unique=True, - ), - ), - ( - "package_id", - 
models.CharField( - editable=False, - help_text="package_id of chainCode", - max_length=128, - unique=True, - ), - ), - ( - "label", - models.CharField( - help_text="label of chainCode", max_length=128 - ), - ), - ( - "creator", - models.CharField( - help_text="creator of chainCode", max_length=128 - ), - ), - ( - "language", - models.CharField( - help_text="language of chainCode", max_length=128 - ), - ), - ( - "description", - models.CharField( - blank=True, - help_text="description of chainCode", - max_length=128, - null=True, - ), - ), - ( - "create_ts", - models.DateTimeField( - auto_now_add=True, help_text="Create time of chainCode" - ), - ), - ], - ), - migrations.CreateModel( - name="FabricCA", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "admin_name", - models.CharField( - default="admin", - help_text="Admin username for ca server", - max_length=32, - ), - ), - ( - "admin_password", - models.CharField( - default="adminpw", - help_text="Admin password for ca server", - max_length=32, - ), - ), - ( - "hosts", - models.JSONField( - blank=True, - default=list, - help_text="Hosts for ca", - null=True, - ), - ), - ( - "type", - models.CharField( - choices=[("tls", "TLS"), ("signature", "Signature")], - default="signature", - help_text="Fabric ca server type", - max_length=32, - ), - ), - ], - ), - migrations.CreateModel( - name="FabricPeer", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "name", - models.CharField( - default="", - help_text="Name of peer node", - max_length=64, - ), - ), - ( - "gossip_use_leader_reflection", - models.BooleanField( - default=True, help_text="Gossip use leader reflection" - ), - ), - ( - "gossip_org_leader", - models.BooleanField( - default=False, help_text="Gossip org leader" - ), - ), - ( - "gossip_skip_handshake", - models.BooleanField( 
- default=True, help_text="Gossip skip handshake" - ), - ), - ( - "local_msp_id", - models.CharField( - default="", - help_text="Local msp id of peer node", - max_length=64, - ), - ), - ], - ), - migrations.CreateModel( - name="Govern", - fields=[ - ( - "id", - models.UUIDField( - default=api.utils.common.make_uuid, - help_text="ID of govern", - primary_key=True, - serialize=False, - ), - ), - ( - "name", - models.CharField( - default="", help_text="Name of govern", max_length=64 - ), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ], - ), - migrations.CreateModel( - name="Network", - fields=[ - ( - "id", - models.UUIDField( - default=api.utils.common.make_uuid, - help_text="ID of network", - primary_key=True, - serialize=False, - ), - ), - ( - "name", - models.CharField( - default="netowrk-30858a0cf2f54ab7a5e16a94958758b5", - help_text="network name, can be generated automatically.", - max_length=64, - ), - ), - ( - "type", - models.CharField( - default="fabric", - help_text="Type of network, ['fabric']", - max_length=64, - ), - ), - ( - "version", - models.CharField( - default="", - help_text="\n Version of network.\n Fabric supported versions: ['1.4.2', '1.5']\n ", - max_length=64, - ), - ), - ( - "created_at", - models.DateTimeField( - auto_now_add=True, help_text="Create time of network" - ), - ), - ( - "consensus", - models.CharField( - default="raft", - help_text="Consensus of network", - max_length=128, - ), - ), - ( - "genesisblock", - models.TextField(help_text="genesis block", null=True), - ), - ( - "database", - models.CharField( - default="leveldb", - help_text="database of network", - max_length=128, - ), - ), - ], - options={ - "ordering": ("-created_at",), - }, - ), - migrations.CreateModel( - name="Node", - fields=[ - ( - "id", - models.UUIDField( - default=api.utils.common.make_uuid, - help_text="ID of node", - primary_key=True, - serialize=False, - ), - ), - ( - "name", - models.CharField( - default="", help_text="Node name", 
max_length=64 - ), - ), - ( - "type", - models.CharField( - help_text="\n Node type defined for network.\n Fabric available types: ['ca', 'orderer', 'peer']\n ", - max_length=64, - ), - ), - ( - "urls", - models.JSONField( - blank=True, - default=dict, - help_text="URL configurations for node", - null=True, - ), - ), - ( - "created_at", - models.DateTimeField( - auto_now_add=True, help_text="Create time of network" - ), - ), - ( - "status", - models.CharField( - choices=[ - ("created", "Created"), - ("restarting", "Restarting"), - ("running", "Running"), - ("removing", "Removing"), - ("paused", "Paused"), - ("exited", "Exited"), - ("dead", "Dead"), - ], - default="created", - help_text="Status of node", - max_length=64, - ), - ), - ( - "config_file", - models.TextField( - help_text="Config file of node", null=True - ), - ), - ("msp", models.TextField(help_text="msp of node", null=True)), - ("tls", models.TextField(help_text="tls of node", null=True)), - ( - "cid", - models.CharField( - default="", - help_text="id used in agent, such as container id", - max_length=256, - ), - ), - ( - "agent", - models.ForeignKey( - help_text="Agent of node", - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name="node", - to="api.agent", - ), - ), - ], - options={ - "ordering": ("-created_at",), - }, - ), - migrations.CreateModel( - name="NodeUser", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "name", - models.CharField( - default="", - help_text="User name of node", - max_length=64, - ), - ), - ( - "secret", - models.CharField( - default="", - help_text="User secret of node", - max_length=64, - ), - ), - ( - "user_type", - models.CharField( - choices=[ - ("peer", "Peer"), - ("orderer", "Orderer"), - ("user", "User"), - ], - default="peer", - help_text="User type of node", - max_length=64, - ), - ), - ( - "status", - models.CharField( - choices=[ - ("registering", 
"Registering"), - ("registered", "Registered"), - ("fail", "Fail"), - ], - default="registering", - help_text="Status of node user", - max_length=32, - ), - ), - ( - "attrs", - models.CharField( - default="", - help_text="Attributes of node user", - max_length=512, - ), - ), - ( - "node", - models.ForeignKey( - help_text="Node of user", - null=True, - on_delete=django.db.models.deletion.CASCADE, - to="api.node", - ), - ), - ], - options={ - "ordering": ("id",), - }, - ), - migrations.CreateModel( - name="Organization", - fields=[ - ( - "id", - models.UUIDField( - default=api.utils.common.make_uuid, - help_text="ID of organization", - primary_key=True, - serialize=False, - ), - ), - ( - "name", - models.CharField( - default="", - help_text="Name of organization", - max_length=64, - ), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ( - "msp", - models.TextField( - help_text="msp of organization", null=True - ), - ), - ( - "tls", - models.TextField( - help_text="tls of organization", null=True - ), - ), - ( - "agents", - models.CharField( - default="", - help_text="agent of organization", - max_length=128, - ), - ), - ( - "network", - models.ForeignKey( - help_text="Network to which the organization belongs", - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="organization", - to="api.network", - ), - ), - ], - options={ - "ordering": ("-created_at",), - }, - ), - migrations.CreateModel( - name="PeerCa", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "address", - models.CharField( - default="", - help_text="Node Address of ca", - max_length=128, - ), - ), - ( - "certificate", - models.FileField( - blank=True, - help_text="Certificate file for ca node.", - max_length=256, - null=True, - upload_to=api.models.get_ca_certificate_path, - ), - ), - ( - "type", - models.CharField( - choices=[("tls", "TLS"), ("signature", "Signature")], - 
default="signature", - help_text="Type of ca node for peer", - max_length=64, - ), - ), - ( - "node", - models.ForeignKey( - help_text="CA node of peer", - null=True, - on_delete=django.db.models.deletion.CASCADE, - to="api.node", - ), - ), - ( - "peer", - models.ForeignKey( - help_text="Peer node", - null=True, - on_delete=django.db.models.deletion.CASCADE, - to="api.fabricpeer", - ), - ), - ], - ), - migrations.CreateModel( - name="User", - fields=[ - ( - "id", - models.UUIDField( - default=api.utils.common.make_uuid, - help_text="ID of user", - primary_key=True, - serialize=False, - ), - ), - ( - "name", - models.CharField(help_text="user name", max_length=128), - ), - ( - "roles", - models.CharField( - help_text="roles of user", max_length=128 - ), - ), - ( - "attributes", - models.CharField( - help_text="attributes of user", max_length=128 - ), - ), - ( - "revoked", - models.CharField( - help_text="revoked of user", max_length=128 - ), - ), - ( - "create_ts", - models.DateTimeField( - auto_now_add=True, help_text="Create time of user" - ), - ), - ("msp", models.TextField(help_text="msp of user", null=True)), - ("tls", models.TextField(help_text="tls of user", null=True)), - ( - "organization", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="api.organization", - ), - ), - ], - ), - migrations.CreateModel( - name="Port", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "external", - models.IntegerField( - default=0, - help_text="External port", - validators=[ - django.core.validators.MinValueValidator(1), - django.core.validators.MaxValueValidator(65535), - ], - ), - ), - ( - "internal", - models.IntegerField( - default=0, - help_text="Internal port", - validators=[ - django.core.validators.MinValueValidator(1), - django.core.validators.MaxValueValidator(65535), - ], - ), - ), - ( - "node", - models.ForeignKey( - help_text="Node of port", - 
null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name="port", - to="api.node", - ), - ), - ], - options={ - "ordering": ("external",), - }, - ), - migrations.CreateModel( - name="PeerCaUser", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "username", - models.CharField( - default="", - help_text="If user not set, set username/password", - max_length=64, - ), - ), - ( - "password", - models.CharField( - default="", - help_text="If user not set, set username/password", - max_length=64, - ), - ), - ( - "type", - models.CharField( - choices=[ - ("peer", "Peer"), - ("orderer", "Orderer"), - ("user", "User"), - ], - default="user", - help_text="User type of ca", - max_length=64, - ), - ), - ( - "peer_ca", - models.ForeignKey( - help_text="Peer Ca configuration", - null=True, - on_delete=django.db.models.deletion.CASCADE, - to="api.peerca", - ), - ), - ( - "user", - models.ForeignKey( - help_text="User of ca node", - null=True, - on_delete=django.db.models.deletion.CASCADE, - to="api.nodeuser", - ), - ), - ], - ), - migrations.AddField( - model_name="node", - name="organization", - field=models.ForeignKey( - help_text="Organization of node", - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name="node", - to="api.organization", - ), - ), - migrations.AddField( - model_name="node", - name="user", - field=models.ForeignKey( - help_text="User of node", - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL, - ), - ), - migrations.CreateModel( - name="KubernetesConfig", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "credential_type", - models.CharField( - choices=[ - ("cert_key", "CertKey"), - ("config", "Config"), - ("username_password", "UsernamePassword"), - ], - default="cert_key", - help_text="Credential type 
of k8s", - max_length=32, - ), - ), - ( - "enable_ssl", - models.BooleanField( - default=False, help_text="Whether enable ssl for api" - ), - ), - ( - "ssl_ca", - models.TextField( - blank=True, - default="", - help_text="Ca file content for ssl", - ), - ), - ( - "nfs_server", - models.CharField( - blank=True, - default="", - help_text="NFS server address for k8s", - max_length=256, - ), - ), - ( - "parameters", - models.JSONField( - blank=True, - default=dict, - help_text="Extra parameters for kubernetes", - null=True, - ), - ), - ( - "cert", - models.TextField( - blank=True, - default="", - help_text="Cert content for k8s", - ), - ), - ( - "key", - models.TextField( - blank=True, default="", help_text="Key content for k8s" - ), - ), - ( - "username", - models.CharField( - blank=True, - default="", - help_text="Username for k8s credential", - max_length=128, - ), - ), - ( - "password", - models.CharField( - blank=True, - default="", - help_text="Password for k8s credential", - max_length=128, - ), - ), - ( - "agent", - models.ForeignKey( - help_text="Agent of kubernetes config", - null=True, - on_delete=django.db.models.deletion.CASCADE, - to="api.agent", - ), - ), - ], - ), - migrations.CreateModel( - name="File", - fields=[ - ( - "id", - models.UUIDField( - default=api.utils.common.make_uuid, - help_text="ID of file", - primary_key=True, - serialize=False, - ), - ), - ( - "name", - models.CharField( - default="", help_text="File name", max_length=64 - ), - ), - ( - "file", - models.FileField( - blank=True, - help_text="File", - max_length=256, - upload_to=api.models.get_file_path, - ), - ), - ( - "created_at", - models.DateTimeField( - auto_now_add=True, help_text="Create time of agent" - ), - ), - ( - "type", - models.CharField( - choices=[("certificate", "Certificate")], - default="certificate", - help_text="File type", - max_length=32, - ), - ), - ( - "organization", - models.ForeignKey( - help_text="Organization of file", - null=True, - 
on_delete=django.db.models.deletion.CASCADE, - to="api.organization", - ), - ), - ], - options={ - "ordering": ("-created_at",), - }, - ), - migrations.CreateModel( - name="Channel", - fields=[ - ( - "id", - models.UUIDField( - default=api.utils.common.make_uuid, - editable=False, - help_text="ID of Channel", - primary_key=True, - serialize=False, - unique=True, - ), - ), - ( - "name", - models.CharField( - help_text="name of channel", max_length=128 - ), - ), - ( - "create_ts", - models.DateTimeField( - auto_now_add=True, help_text="Create time of Channel" - ), - ), - ( - "config", - models.JSONField( - blank=True, - default=dict, - help_text="Channel config", - null=True, - ), - ), - ( - "network", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="api.network", - ), - ), - ( - "orderers", - models.ManyToManyField( - help_text="Orderer list in the channel", to="api.node" - ), - ), - ( - "organizations", - models.ManyToManyField( - help_text="the organization of the channel", - related_name="channels", - to="api.organization", - ), - ), - ], - ), - migrations.AddField( - model_name="agent", - name="organization", - field=models.ForeignKey( - help_text="Organization of agent", - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name="agent", - to="api.organization", - ), - ), - migrations.AddField( - model_name="userprofile", - name="organization", - field=models.ForeignKey( - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name="users", - to="api.organization", - ), - ), - migrations.AddField( - model_name="userprofile", - name="user_permissions", - field=models.ManyToManyField( - blank=True, - help_text="Specific permissions for this user.", - related_name="user_set", - related_query_name="user", - to="auth.permission", - verbose_name="user permissions", - ), - ), - ] diff --git a/src/api-engine/api/migrations/0002_userprofile_created_at_alter_agent_name_and_more.py 
b/src/api-engine/api/migrations/0002_userprofile_created_at_alter_agent_name_and_more.py deleted file mode 100644 index 99c4d5cc7..000000000 --- a/src/api-engine/api/migrations/0002_userprofile_created_at_alter_agent_name_and_more.py +++ /dev/null @@ -1,49 +0,0 @@ -# Generated by Django 4.2.16 on 2025-06-25 04:20 - -from django.db import migrations, models -import django.utils.timezone - - -class Migration(migrations.Migration): - - dependencies = [ - ("api", "0001_initial"), - ] - - operations = [ - migrations.AddField( - model_name="userprofile", - name="created_at", - field=models.DateTimeField( - auto_now_add=True, default=django.utils.timezone.now - ), - preserve_default=False, - ), - migrations.AlterField( - model_name="agent", - name="name", - field=models.CharField( - blank=True, - help_text="Agent name, can be generated automatically.", - max_length=64, - unique=True, - ), - ), - migrations.AlterField( - model_name="network", - name="name", - field=models.CharField( - blank=True, - help_text="network name, can be generated automatically.", - max_length=64, - unique=True, - ), - ), - migrations.AlterField( - model_name="node", - name="name", - field=models.CharField( - blank=True, help_text="Node name", max_length=64, unique=True - ), - ), - ] diff --git a/src/api-engine/api/migrations/0003_alter_agent_name_alter_agent_urls.py b/src/api-engine/api/migrations/0003_alter_agent_name_alter_agent_urls.py deleted file mode 100644 index b417535dd..000000000 --- a/src/api-engine/api/migrations/0003_alter_agent_name_alter_agent_urls.py +++ /dev/null @@ -1,33 +0,0 @@ -# Generated by Django 4.2.16 on 2025-07-29 10:35 - -import api.validators -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("api", "0002_userprofile_created_at_alter_agent_name_and_more"), - ] - - operations = [ - migrations.AlterField( - model_name="agent", - name="name", - field=models.CharField( - blank=True, - help_text="Agent name, can be 
generated automatically.", - max_length=64, - ), - ), - migrations.AlterField( - model_name="agent", - name="urls", - field=models.CharField( - blank=True, - help_text="Agent URL", - null=True, - validators=[api.validators.validate_url], - ), - ), - ] diff --git a/src/api-engine/api/migrations/__init__.py b/src/api-engine/api/migrations/__init__.py deleted file mode 100644 index 0480730a5..000000000 --- a/src/api-engine/api/migrations/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# diff --git a/src/api-engine/api/models.py b/src/api-engine/api/models.py deleted file mode 100644 index 1c83d0477..000000000 --- a/src/api-engine/api/models.py +++ /dev/null @@ -1,842 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import os -import shutil -import tarfile -from zipfile import ZipFile - -from django.conf import settings -from django.contrib.auth.models import AbstractUser -from django.core.exceptions import ValidationError -from django.core.validators import MaxValueValidator, MinValueValidator -from django.db import models -from django.dispatch import receiver -from django.db.models.signals import post_save -from django.contrib.postgres.fields import ArrayField - -from api.common.enums import ( - HostStatus, - HostType, - K8SCredentialType, - separate_upper_class, - NodeStatus, - FileType, - FabricCAServerType, - FabricCAUserType, - FabricCAUserStatus, -) -from api.common.enums import ( - UserRole, - NetworkType, - FabricNodeType, - FabricVersions, -) -from api.utils.common import make_uuid, random_name, hash_file -from api.config import CELLO_HOME -from api.validators import validate_url - -SUPER_USER_TOKEN = getattr(settings, "ADMIN_TOKEN", "") -MAX_CAPACITY = getattr(settings, "MAX_AGENT_CAPACITY", 100) -MAX_NODE_CAPACITY = getattr(settings, "MAX_NODE_CAPACITY", 600) -MEDIA_ROOT = getattr(settings, "MEDIA_ROOT") -LIMIT_K8S_CONFIG_FILE_MB = 100 -# Limit file upload size less than 100Mb -LIMIT_FILE_MB = 100 -MIN_PORT = 1 
-MAX_PORT = 65535 - - -class Govern(models.Model): - id = models.UUIDField( - primary_key=True, - help_text="ID of govern", - default=make_uuid, - editable=True, - ) - name = models.CharField( - default="", max_length=64, help_text="Name of govern" - ) - created_at = models.DateTimeField(auto_now_add=True) - - -class Organization(models.Model): - id = models.UUIDField( - primary_key=True, - help_text="ID of organization", - default=make_uuid, - editable=True, - ) - name = models.CharField( - default="", max_length=64, help_text="Name of organization" - ) - created_at = models.DateTimeField(auto_now_add=True) - msp = models.TextField(help_text="msp of organization", null=True) - tls = models.TextField(help_text="tls of organization", null=True) - agents = models.CharField( - help_text="agent of organization", - max_length=128, - default="", - ) - network = models.ForeignKey( - "Network", - help_text="Network to which the organization belongs", - null=True, - related_name="organization", - on_delete=models.SET_NULL, - ) - # channel = models.ForeignKey( - # "Channel", - # help_text="channel to which the organization belongs", - # null=True, - # related_name="channel", - # on_delete=models.SET_NULL - # ) - - class Meta: - ordering = ("-created_at",) - - -class UserProfile(AbstractUser): - id = models.UUIDField( - primary_key=True, - help_text="ID of user", - default=make_uuid, - editable=True, - ) - email = models.EmailField(db_index=True, unique=True) - username = models.CharField( - default="", max_length=64, help_text="Name of user" - ) - role = models.CharField( - choices=UserRole.to_choices(True), - default=UserRole.User.value, - max_length=64, - ) - organization = models.ForeignKey( - Organization, - null=True, - on_delete=models.CASCADE, - related_name="users", - ) - created_at = models.DateTimeField(auto_now_add=True) - USERNAME_FIELD = "email" - REQUIRED_FIELDS = [] - - class Meta: - verbose_name = "User Info" - verbose_name_plural = verbose_name - ordering = 
["-date_joined"] - - def __str__(self): - return self.username - - @property - def is_admin(self): - return self.role == UserRole.Admin.name.lower() - - @property - def is_operator(self): - return self.role == UserRole.Operator.name.lower() - - @property - def is_common_user(self): - return self.role == UserRole.User.name.lower() - - -def get_agent_config_file_path(instance, file): - file_ext = file.split(".")[-1] - filename = "%s.%s" % (hash_file(instance.config_file), file_ext) - - return os.path.join("config_files/%s" % str(instance.id), filename) - - -def validate_agent_config_file(file): - file_size = file.size - if file_size > LIMIT_K8S_CONFIG_FILE_MB * 1024 * 1024: - raise ValidationError( - "Max file size is %s MB" % LIMIT_K8S_CONFIG_FILE_MB - ) - - -class Agent(models.Model): - id = models.UUIDField( - primary_key=True, - help_text="ID of agent", - default=make_uuid, - editable=True, - ) - name = models.CharField( - help_text="Agent name, can be generated automatically.", - max_length=64, - blank=True - ) - urls = models.CharField( - help_text="Agent URL", null=True, blank=True, validators=[validate_url] - ) - organization = models.ForeignKey( - "Organization", - null=True, - on_delete=models.CASCADE, - help_text="Organization of agent", - related_name="agent", - ) - status = models.CharField( - help_text="Status of agent", - choices=HostStatus.to_choices(True), - max_length=10, - default=HostStatus.Active.name.lower(), - ) - type = models.CharField( - help_text="Type of agent", - choices=HostType.to_choices(True), - max_length=32, - default=HostType.Docker.name.lower(), - ) - config_file = models.FileField( - help_text="Config file for agent", - max_length=256, - blank=True, - upload_to=get_agent_config_file_path, - ) - created_at = models.DateTimeField( - help_text="Create time of agent", auto_now_add=True - ) - - # free_port = models.IntegerField( - # help_text="Agent free port.", - # default=30000, - # ) - free_ports = ArrayField( - 
models.IntegerField(blank=True), - help_text="Agent free ports.", - null=True, - ) - - def save(self, *args, **kwargs): - if not self.name: - self.name = random_name("agent") - super().save(*args, **kwargs) - - def delete(self, using=None, keep_parents=False): - if self.config_file: - if os.path.isfile(self.config_file.path): - os.remove(self.config_file.path) - shutil.rmtree( - os.path.dirname(self.config_file.path), ignore_errors=True - ) - - super(Agent, self).delete(using, keep_parents) - - class Meta: - ordering = ("-created_at",) - - -@receiver(post_save, sender=Agent) -def extract_file(sender, instance, created, *args, **kwargs): - if created: - if instance.config_file: - file_format = instance.config_file.name.split(".")[-1] - if file_format in ["tgz", "gz"]: - tar = tarfile.open(instance.config_file.path) - tar.extractall(path=os.path.dirname(instance.config_file.path)) - elif file_format == "zip": - with ZipFile(instance.config_file.path, "r") as zip_file: - zip_file.extractall( - path=os.path.dirname(instance.config_file.path) - ) - - -class KubernetesConfig(models.Model): - credential_type = models.CharField( - help_text="Credential type of k8s", - choices=K8SCredentialType.to_choices(separate_class_name=True), - max_length=32, - default=separate_upper_class(K8SCredentialType.CertKey.name), - ) - enable_ssl = models.BooleanField( - help_text="Whether enable ssl for api", default=False - ) - ssl_ca = models.TextField( - help_text="Ca file content for ssl", default="", blank=True - ) - nfs_server = models.CharField( - help_text="NFS server address for k8s", - default="", - max_length=256, - blank=True, - ) - parameters = models.JSONField( - help_text="Extra parameters for kubernetes", - default=dict, - null=True, - blank=True, - ) - cert = models.TextField( - help_text="Cert content for k8s", default="", blank=True - ) - key = models.TextField( - help_text="Key content for k8s", default="", blank=True - ) - username = models.CharField( - 
help_text="Username for k8s credential", - default="", - max_length=128, - blank=True, - ) - password = models.CharField( - help_text="Password for k8s credential", - default="", - max_length=128, - blank=True, - ) - agent = models.ForeignKey( - Agent, - help_text="Agent of kubernetes config", - on_delete=models.CASCADE, - null=True, - ) - - -class Network(models.Model): - id = models.UUIDField( - primary_key=True, - help_text="ID of network", - default=make_uuid, - editable=True, - ) - name = models.CharField( - help_text="network name, can be generated automatically.", - max_length=64, - unique=True, - blank=True, - ) - type = models.CharField( - help_text="Type of network, %s" % NetworkType.values(), - max_length=64, - default=NetworkType.Fabric.value, - ) - version = models.CharField( - help_text=""" - Version of network. - Fabric supported versions: %s - """ - % (FabricVersions.values()), - max_length=64, - default="", - ) - created_at = models.DateTimeField( - help_text="Create time of network", auto_now_add=True - ) - consensus = models.CharField( - help_text="Consensus of network", - max_length=128, - default="raft", - ) - genesisblock = models.TextField( - help_text="genesis block", - null=True, - ) - database = models.CharField( - help_text="database of network", - max_length=128, - default="leveldb", - ) - - class Meta: - ordering = ("-created_at",) - - def save(self, *args, **kwargs): - if not self.name: - self.name = random_name("network") - super().save(*args, **kwargs) - - -def get_compose_file_path(instance, file): - return os.path.join( - "org/%s/agent/docker/compose_files/%s" - % (str(instance.organization.id), str(instance.id)), - "docker-compose.yml", - ) - - -def get_ca_certificate_path(instance, file): - return os.path.join( - "fabric/ca/certificates/%s" % str(instance.id), file.name - ) - - -def get_node_file_path(instance, file): - """ - Get the file path where will be stored in - :param instance: database object of this db record - :param 
file: file object. - :return: path of file system which will store the file. - """ - file_ext = file.split(".")[-1] - filename = "%s.%s" % (hash_file(instance.file), file_ext) - - return os.path.join( - "files/%s/node/%s" % (str(instance.organization.id), str(instance.id)), - filename, - ) - - -class FabricCA(models.Model): - admin_name = models.CharField( - help_text="Admin username for ca server", - default="admin", - max_length=32, - ) - admin_password = models.CharField( - help_text="Admin password for ca server", - default="adminpw", - max_length=32, - ) - hosts = models.JSONField( - help_text="Hosts for ca", null=True, blank=True, default=list - ) - type = models.CharField( - help_text="Fabric ca server type", - default=FabricCAServerType.Signature.value, - choices=FabricCAServerType.to_choices(), - max_length=32, - ) - - -class PeerCaUser(models.Model): - user = models.ForeignKey( - "NodeUser", - help_text="User of ca node", - null=True, - on_delete=models.CASCADE, - ) - username = models.CharField( - help_text="If user not set, set username/password", - max_length=64, - default="", - ) - password = models.CharField( - help_text="If user not set, set username/password", - max_length=64, - default="", - ) - type = models.CharField( - help_text="User type of ca", - max_length=64, - choices=FabricCAUserType.to_choices(), - default=FabricCAUserType.User.value, - ) - peer_ca = models.ForeignKey( - "PeerCa", - help_text="Peer Ca configuration", - null=True, - on_delete=models.CASCADE, - ) - - -class PeerCa(models.Model): - node = models.ForeignKey( - "Node", - help_text="CA node of peer", - null=True, - on_delete=models.CASCADE, - ) - peer = models.ForeignKey( - "FabricPeer", - help_text="Peer node", - null=True, - on_delete=models.CASCADE, - ) - address = models.CharField( - help_text="Node Address of ca", default="", max_length=128 - ) - certificate = models.FileField( - help_text="Certificate file for ca node.", - max_length=256, - 
upload_to=get_ca_certificate_path, - blank=True, - null=True, - ) - type = models.CharField( - help_text="Type of ca node for peer", - choices=FabricCAServerType.to_choices(), - max_length=64, - default=FabricCAServerType.Signature.value, - ) - - -class FabricPeer(models.Model): - name = models.CharField( - help_text="Name of peer node", max_length=64, default="" - ) - gossip_use_leader_reflection = models.BooleanField( - help_text="Gossip use leader reflection", default=True - ) - gossip_org_leader = models.BooleanField( - help_text="Gossip org leader", default=False - ) - gossip_skip_handshake = models.BooleanField( - help_text="Gossip skip handshake", default=True - ) - local_msp_id = models.CharField( - help_text="Local msp id of peer node", max_length=64, default="" - ) - - -class Node(models.Model): - id = models.UUIDField( - primary_key=True, - help_text="ID of node", - default=make_uuid, - editable=True, - ) - name = models.CharField( - help_text="Node name", max_length=64, unique=True, blank=True - ) - type = models.CharField( - help_text=""" - Node type defined for network. 
- Fabric available types: %s - """ - % (FabricNodeType.names()), - max_length=64, - ) - urls = models.JSONField( - help_text="URL configurations for node", - null=True, - blank=True, - default=dict, - ) - user = models.ForeignKey( - UserProfile, - help_text="User of node", - null=True, - on_delete=models.CASCADE, - ) - organization = models.ForeignKey( - Organization, - help_text="Organization of node", - null=True, - related_name="node", - on_delete=models.CASCADE, - ) - agent = models.ForeignKey( - Agent, - help_text="Agent of node", - null=True, - related_name="node", - on_delete=models.CASCADE, - ) - # network = models.ForeignKey( - # Network, - # help_text="Network which node joined.", - # on_delete=models.CASCADE, - # null=True, - # ) - created_at = models.DateTimeField( - help_text="Create time of network", auto_now_add=True - ) - status = models.CharField( - help_text="Status of node", - choices=NodeStatus.to_choices(True), - max_length=64, - default=NodeStatus.Created.name.lower(), - ) - config_file = models.TextField( - help_text="Config file of node", - null=True, - ) - msp = models.TextField( - help_text="msp of node", - null=True, - ) - tls = models.TextField( - help_text="tls of node", - null=True, - ) - cid = models.CharField( - help_text="id used in agent, such as container id", - max_length=256, - default="", - ) - - class Meta: - ordering = ("-created_at",) - - def get_compose_file_path(self): - return "%s/org/%s/agent/docker/compose_files/%s/docker-compose.yml" % ( - MEDIA_ROOT, - str(self.organization.id), - str(self.id), - ) - - def save( - self, - force_insert=False, - force_update=False, - using=None, - update_fields=None, - ): - if not self.name: - self.name = random_name(self.type) - super(Node, self).save( - force_insert, force_update, using, update_fields - ) - - # def delete(self, using=None, keep_parents=False): - # if self.compose_file: - # compose_file_path = Path(self.compose_file.path) - # if 
os.path.isdir(os.path.dirname(compose_file_path)): - # shutil.rmtree(os.path.dirname(compose_file_path)) - # - # # remove related files of node - # if self.file: - # file_path = Path(self.file.path) - # if os.path.isdir(os.path.dirname(file_path)): - # shutil.rmtree(os.path.dirname(file_path)) - # - # if self.ca: - # self.ca.delete() - # - # super(Node, self).delete(using, keep_parents) - - -class NodeUser(models.Model): - name = models.CharField( - help_text="User name of node", max_length=64, default="" - ) - secret = models.CharField( - help_text="User secret of node", max_length=64, default="" - ) - user_type = models.CharField( - help_text="User type of node", - choices=FabricCAUserType.to_choices(), - default=FabricCAUserType.Peer.value, - max_length=64, - ) - node = models.ForeignKey( - Node, help_text="Node of user", on_delete=models.CASCADE, null=True - ) - status = models.CharField( - help_text="Status of node user", - choices=FabricCAUserStatus.to_choices(), - default=FabricCAUserStatus.Registering.value, - max_length=32, - ) - attrs = models.CharField( - help_text="Attributes of node user", default="", max_length=512 - ) - - class Meta: - ordering = ("id",) - - -class Port(models.Model): - node = models.ForeignKey( - Node, - help_text="Node of port", - on_delete=models.CASCADE, - null=True, - related_name="port", - ) - external = models.IntegerField( - help_text="External port", - default=0, - validators=[MinValueValidator(MIN_PORT), MaxValueValidator(MAX_PORT)], - ) - internal = models.IntegerField( - help_text="Internal port", - default=0, - validators=[MinValueValidator(MIN_PORT), MaxValueValidator(MAX_PORT)], - ) - - class Meta: - ordering = ("external",) - - -def get_file_path(instance, file): - """ - Get the file path where will be stored in - :param instance: database object of this db record - :param file: file object. - :return: path of file system which will store the file. 
- """ - file_ext = file.split(".")[-1] - filename = "%s.%s" % (hash_file(instance.file), file_ext) - - return os.path.join( - "files/%s/%s" % (str(instance.organization.id), str(instance.id)), - filename, - ) - - -def validate_file(file): - """ - Validate file of upload - :param file: file object - :return: raise exception if validate failed - """ - file_size = file.size - if file_size > LIMIT_FILE_MB * 1024 * 1024: - raise ValidationError("Max file size is %s MB" % LIMIT_FILE_MB) - - -class File(models.Model): - id = models.UUIDField( - primary_key=True, - help_text="ID of file", - default=make_uuid, - editable=True, - ) - organization = models.ForeignKey( - Organization, - help_text="Organization of file", - null=True, - on_delete=models.CASCADE, - ) - name = models.CharField(help_text="File name", max_length=64, default="") - file = models.FileField( - help_text="File", max_length=256, blank=True, upload_to=get_file_path - ) - created_at = models.DateTimeField( - help_text="Create time of agent", auto_now_add=True - ) - type = models.CharField( - choices=FileType.to_choices(True), - max_length=32, - help_text="File type", - default=FileType.Certificate.name.lower(), - ) - - class Meta: - ordering = ("-created_at",) - - class User(models.Model): - id = models.UUIDField( - primary_key=True, - help_text="ID of user", - default=make_uuid, - editable=True, - ) - name = models.CharField(help_text="user name", max_length=128) - roles = models.CharField(help_text="roles of user", max_length=128) - organization = models.ForeignKey( - "Organization", on_delete=models.CASCADE - ) - attributes = models.CharField( - help_text="attributes of user", max_length=128 - ) - revoked = models.CharField(help_text="revoked of user", max_length=128) - create_ts = models.DateTimeField( - help_text="Create time of user", auto_now_add=True - ) - msp = models.TextField( - help_text="msp of user", - null=True, - ) - tls = models.TextField( - help_text="tls of user", - null=True, - ) - - 
-class Channel(models.Model): - id = models.UUIDField( - primary_key=True, - help_text="ID of Channel", - default=make_uuid, - editable=False, - unique=True, - ) - name = models.CharField(help_text="name of channel", max_length=128) - organizations = models.ManyToManyField( - to="Organization", - help_text="the organization of the channel", - related_name="channels", - # on_delete=models.SET_NULL - ) - create_ts = models.DateTimeField( - help_text="Create time of Channel", auto_now_add=True - ) - network = models.ForeignKey("Network", on_delete=models.CASCADE) - orderers = models.ManyToManyField( - to="Node", - help_text="Orderer list in the channel", - ) - config = models.JSONField( - help_text="Channel config", - default=dict, - null=True, - blank=True, - ) - - def get_channel_config_path(self): - return "/var/www/server/" + self.name + "_config.block" - - def get_channel_artifacts_path(self, artifact): - return CELLO_HOME + "/" + self.network.name + "/" + artifact - - # class ChainCode(models.Model): - # id = models.UUIDField( - # primary_key=True, - # help_text="ID of chainCode", - # default=make_uuid, - # editable=False, - # unique=True - # ) - # name = models.CharField( - # help_text="ChainCode name", max_length=128 - # ) - # version = models.CharField( - # help_text="version of chainCode", max_length=128 - # ) - # creator = models.CharField( - # help_text="creator of chainCode", max_length=128 - # ) - # language = models.CharField( - # help_text="language of chainCode", max_length=128 - # ) - # channel = models.ManyToManyField("Channel") - # install_times = models.DateTimeField( - # help_text="Create time of install", auto_now_add=True - # ) - # instantiate_times = models.DateTimeField( - # help_text="Create time of instantiate", auto_now_add=True - # ) - # node = models.ManyToManyField("Node", related_name='node') - # status = models.CharField( - # help_text="status of chainCode", max_length=128 - # ) - - -class ChainCode(models.Model): - id = 
models.UUIDField( - primary_key=True, - help_text="ID of ChainCode", - default=make_uuid, - editable=False, - unique=True, - ) - package_id = models.CharField( - help_text="package_id of chainCode", - max_length=128, - editable=False, - unique=True, - ) - label = models.CharField(help_text="label of chainCode", max_length=128) - creator = models.CharField( - help_text="creator of chainCode", max_length=128 - ) - language = models.CharField( - help_text="language of chainCode", max_length=128 - ) - description = models.CharField( - help_text="description of chainCode", - max_length=128, - blank=True, - null=True, - ) - create_ts = models.DateTimeField( - help_text="Create time of chainCode", auto_now_add=True - ) diff --git a/src/api-engine/api/routes/__init__.py b/src/api-engine/api/routes/__init__.py deleted file mode 100644 index 0480730a5..000000000 --- a/src/api-engine/api/routes/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# diff --git a/src/api-engine/api/routes/agent/__init__.py b/src/api-engine/api/routes/agent/__init__.py deleted file mode 100644 index 0480730a5..000000000 --- a/src/api-engine/api/routes/agent/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# diff --git a/src/api-engine/api/routes/agent/serializers.py b/src/api-engine/api/routes/agent/serializers.py deleted file mode 100644 index 3c5039604..000000000 --- a/src/api-engine/api/routes/agent/serializers.py +++ /dev/null @@ -1,261 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import logging - -from django.core.validators import ( - RegexValidator, - FileExtensionValidator, -) -from rest_framework import serializers - -from api.common.enums import ( - LogLevel, - K8SCredentialType, - separate_upper_class, -) -from api.common.serializers import PageQuerySerializer, ListResponseSerializer -from api.models import Agent, KubernetesConfig, validate_agent_config_file -from api.utils.common import to_form_paras - -LOG 
= logging.getLogger(__name__) - -NameHelpText = "Name of Agent" -WorkerApiHelpText = "API address of worker" -IDHelpText = "ID of Agent" -CapacityHelpText = "Capacity of Agent" - -NameMinLen = 4 -NameMaxLen = 36 -WorkerAPIMinLen = 6 -WorkerAPIMaxLen = 128 -CapacityMinValue = 1 - - -class AgentQuery(PageQuerySerializer, serializers.ModelSerializer): - class Meta: - model = Agent - fields = ("status", "name", "type", "page", "per_page", "organization") - - -class AgentIDSerializer(serializers.Serializer): - id = serializers.UUIDField(help_text=IDHelpText) - - -class K8SParameterSerializer(serializers.ModelSerializer): - parameters = serializers.DictField( - help_text="Extra parameters", required=False - ) - - class Meta: - model = KubernetesConfig - fields = ( - "credential_type", - "enable_ssl", - "ssl_ca", - "nfs_server", - "parameters", - "cert", - "key", - "username", - "password", - ) - extra_kwargs = { - "credential_type": {"required": True}, - "enable_ssl": {"required": True}, - "nfs_server": { - "validators": [ - RegexValidator( - regex="^\d{1,3}\.\d{1,3}\.\d{1,3}" - "\.\d{1,3}:(\/+\w{0,}){0,}$", - message="Enter a valid nfs url.", - ) - ] - }, - } - - def validate(self, attrs): - credential_type = attrs.get("credential_type") - if credential_type == separate_upper_class( - K8SCredentialType.CertKey.name - ): - cert = attrs.get("cert") - key = attrs.get("key") - if not cert or not key: - raise serializers.ValidationError("Need cert and key content") - else: - attrs["username"] = "" - attrs["password"] = "" - elif credential_type == separate_upper_class( - K8SCredentialType.UsernamePassword.name - ): - username = attrs.get("username") - password = attrs.get("password") - if not username or not password: - raise serializers.ValidationError("Need username and password") - else: - attrs["cert"] = "" - attrs["key"] = "" - elif credential_type == separate_upper_class( - K8SCredentialType.Config.name - ): - # TODO: Add config type validation - pass - - return attrs 
- - -class AgentCreateBody(serializers.ModelSerializer): - # organization = serializers.UUIDField(help_text=IDHelpText) - - def to_form_paras(self): - custom_paras = to_form_paras(self) - - return custom_paras - - class Meta: - model = Agent - fields = ( - "name", - "type", - "urls", - "config_file", - "organization", - ) - extra_kwargs = { - "type": {"required": True}, - "urls": {"required": True}, - "name": {"required": True}, - "organization": {"required": False}, - "config_file": { - "required": False, - "validators": [ - FileExtensionValidator( - allowed_extensions=["tgz", "gz", "zip"] - ), - validate_agent_config_file, - ], - }, - } - - def validate(self, attrs): - pass - return attrs - - -class AgentPatchBody(serializers.Serializer): - name = serializers.CharField( - min_length=NameMinLen, - max_length=NameMaxLen, - help_text=NameHelpText, - required=False, - allow_null=True, - ) - capacity = serializers.IntegerField( - min_value=CapacityMinValue, - required=False, - allow_null=True, - help_text=CapacityHelpText, - ) - log_level = serializers.ChoiceField( - choices=LogLevel.to_choices(), - required=False, - allow_null=True, - help_text=LogLevel.get_info("Log levels:"), - ) - - -class AgentUpdateBody(AgentIDSerializer, serializers.ModelSerializer): - # organization = serializers.UUIDField( - # help_text="Organization ID", required=True, allow_null=True - # ) - - class Meta: - model = Agent - fields = ( - "name", - "type", - "urls", - "organization", - ) - - -class AgentResponseSerializer(AgentIDSerializer, serializers.ModelSerializer): - organization = serializers.UUIDField( - help_text="Organization ID", required=True, allow_null=True - ) - - class Meta: - model = Agent - fields = ( - "id", - "name", - "status", - "created_at", - "type", - "urls", - "organization", - ) - extra_kwargs = { - "id": {"required": True}, - "name": {"required": True}, - "status": {"required": True}, - "created_at": {"required": True, "read_only": False}, - "type": {"required": 
True}, - "organization": {"required": True}, - "urls": {"required": True}, - } - - -class AgentInfoSerializer(AgentIDSerializer, serializers.ModelSerializer): - k8s_config = K8SParameterSerializer( - help_text="Config of agent which is for kubernetes", required=False - ) - organization_id = serializers.UUIDField( - help_text="Organization ID", required=False, allow_null=True - ) - - class Meta: - model = Agent - fields = ( - "id", - "name", - "url", - # "capacity", - # "node_capacity", - "status", - "created_at", - # "log_level", - "type", - # "schedulable", - "k8s_config", - "organization_id", - ) - extra_kwargs = { - "id": {"required": True}, - "name": {"required": True}, - "status": {"required": True}, - # "capacity": {"required": True}, - # "node_capacity": {"required": True}, - "created_at": {"required": True, "read_only": False}, - "type": {"required": True}, - # "log_level": {"required": True}, - # "schedulable": {"required": True}, - } - - -class AgentListResponse(ListResponseSerializer): - data = AgentResponseSerializer(many=True, help_text="Agents data") - - -class AgentApplySerializer(serializers.ModelSerializer): - class Meta: - model = Agent - fields = ( - "type", - # "capacity" - ) - extra_kwargs = { - "type": {"required": True}, - # "capacity": {"required": True}, - } diff --git a/src/api-engine/api/routes/agent/views.py b/src/api-engine/api/routes/agent/views.py deleted file mode 100644 index 6f5d06cbf..000000000 --- a/src/api-engine/api/routes/agent/views.py +++ /dev/null @@ -1,406 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import logging - -from django.core.exceptions import ObjectDoesNotExist -from django.core.paginator import Paginator -from drf_yasg.utils import swagger_auto_schema -from rest_framework import viewsets, status -from rest_framework.decorators import action -from rest_framework.permissions import IsAuthenticated -from rest_framework.response import Response - -from api.common.enums import HostType -from api.exceptions 
import ( - ResourceNotFound, - ResourceExists, - CustomError, - NoResource, - ResourceInUse, -) -from api.models import Agent, KubernetesConfig -from api.routes.agent.serializers import ( - AgentQuery, - AgentListResponse, - AgentCreateBody, - AgentIDSerializer, - AgentPatchBody, - AgentUpdateBody, - AgentInfoSerializer, - AgentApplySerializer, -) -from api.utils.common import with_common_response -from api.common import ok, err - -LOG = logging.getLogger(__name__) - - -class AgentViewSet(viewsets.ViewSet): - """Class represents agent related operations.""" - - permission_classes = [ - IsAuthenticated, - ] - - @swagger_auto_schema( - query_serializer=AgentQuery, - responses=with_common_response( - with_common_response({status.HTTP_200_OK: AgentListResponse}) - ), - ) - def list(self, request): - """ - List Agents - - :param request: query parameter - :return: agent list - :rtype: list - """ - try: - serializer = AgentQuery(data=request.GET) - if serializer.is_valid(raise_exception=True): - page = serializer.validated_data.get("page") - per_page = serializer.validated_data.get("per_page") - agent_status = serializer.validated_data.get("status") - name = serializer.validated_data.get("name") - agent_type = serializer.validated_data.get("type") - organization = request.user.organization - - query_filters = {} - # if organization: - # if not request.user.is_operator: - # raise PermissionDenied() - # query_filters.update({"organization": organization}) - # else: - # org_name = ( - # request.user.organization.name - # if request.user.organization - # else "" - # ) - # if request.user.is_admin: - # query_filters.update({"organization__name": org_name}) - if name: - query_filters.update({"name__icontains": name}) - if agent_status: - query_filters.update({"status": agent_status}) - if agent_type: - query_filters.update({"type": agent_type}) - if organization: - query_filters.update({"organization": organization}) - - agents = Agent.objects.filter(**query_filters) - p = 
Paginator(agents, per_page) - agents = p.page(page) - # agents = [agent.__dict__ for agent in agents] - agent_list = [] - # for agent in agents: - # agent_dict = agent.__dict__ - # agent_list.append(agent_dict) - agent_list = [ - { - "id": agent.id, - "name": agent.name, - "status": agent.status, - "type": agent.type, - "urls": agent.urls, - "organization": ( - str(agent.organization.id) - if agent.organization - else None - ), - "created_at": agent.created_at, - } - for agent in agents - ] - - response = AgentListResponse( - data={"data": agent_list, "total": p.count} - ) - if response.is_valid(raise_exception=True): - return Response( - ok(response.validated_data), status=status.HTTP_200_OK - ) - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - @swagger_auto_schema( - request_body=AgentCreateBody, - responses=with_common_response( - {status.HTTP_201_CREATED: AgentIDSerializer} - ), - ) - def create(self, request): - """ - Create Agent - - :param request: create parameter - :return: agent ID - :rtype: uuid - """ - try: - serializer = AgentCreateBody(data=request.data) - if serializer.is_valid(raise_exception=True): - name = serializer.validated_data.get("name") - agent_type = serializer.validated_data.get("type") - urls = serializer.validated_data.get("urls") - config_file = serializer.validated_data.get("config_file") - - body = { - "type": agent_type, - "urls": urls, - "name": name, - } - - if name: - agent_count = Agent.objects.filter(name=name).count() - if agent_count > 0: - raise ResourceExists("Agent Exists") - - body.update({"name": name}) - - if config_file is not None: - body.update({"config_file": config_file}) - - org = request.user.organization - if org.agent.all(): - raise ResourceExists("Agent Exists for the Organization") - else: - body.update({"organization": org}) - - agent = Agent(**body) - agent.save() - - response = AgentIDSerializer(data=agent.__dict__) - if response.is_valid(raise_exception=True): 
- return Response( - ok(response.validated_data), - status=status.HTTP_201_CREATED, - ) - except ResourceExists as e: - raise e - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - @swagger_auto_schema( - responses=with_common_response( - {status.HTTP_200_OK: AgentInfoSerializer} - ) - ) - def retrieve(self, request, pk=None): - """ - Retrieve agent - - :param request: destory parameter - :param pk: primary key - :return: none - :rtype: rest_framework.status - """ - try: - if request.user.is_operator: - agent = Agent.objects.get(id=pk) - else: - agent = Agent.objects.get( - id=pk, organization=request.user.organization - ) - k8s_config = None - if agent.type == HostType.Kubernetes.name.lower(): - k8s_config = KubernetesConfig.objects.get(agent=agent) - except ObjectDoesNotExist: - raise ResourceNotFound("Agent not found") - else: - data = agent.__dict__ - if k8s_config: - data.update({"k8s_config": k8s_config.__dict__}) - response = AgentInfoSerializer(data=data) - if response.is_valid(raise_exception=True): - return Response( - ok(response.validated_data), status=status.HTTP_200_OK - ) - - @swagger_auto_schema( - request_body=AgentUpdateBody, - responses=with_common_response({status.HTTP_202_ACCEPTED: "Accepted"}), - ) - def update(self, request, pk=None): - """ - Update Agent - - Update special agent with id. 
- """ - try: - serializer = AgentUpdateBody(data=request.data) - if serializer.is_valid(raise_exception=True): - name = serializer.validated_data.get("name") - # urls = serializer.validated_data.get("urls") - # organization = request.user.organization - try: - if Agent.objects.get(name=name): - raise ResourceExists("Agent Exists") - except ObjectDoesNotExist: - pass - Agent.objects.filter(id=pk).update(name=name) - - return Response(ok(None), status=status.HTTP_202_ACCEPTED) - except ResourceExists as e: - raise e - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - @swagger_auto_schema( - request_body=AgentPatchBody, - responses=with_common_response({status.HTTP_202_ACCEPTED: "Accepted"}), - ) - def partial_update(self, request, pk=None): - """ - Partial Update Agent - - Partial update special agent with id. - """ - try: - serializer = AgentPatchBody(data=request.data) - if serializer.is_valid(raise_exception=True): - name = serializer.validated_data.get("name") - capacity = serializer.validated_data.get("capacity") - log_level = serializer.validated_data.get("log_level") - try: - agent = Agent.objects.get(id=pk) - except ObjectDoesNotExist: - raise ResourceNotFound("Agent not found") - else: - if name: - agent.name = name - if capacity: - agent.capacity = capacity - if log_level: - agent.log_level = log_level - agent.save() - - return Response(ok(None), status=status.HTTP_202_ACCEPTED) - except ResourceNotFound as e: - raise e - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - @swagger_auto_schema( - responses=with_common_response( - { - status.HTTP_204_NO_CONTENT: "No Content", - status.HTTP_404_NOT_FOUND: "Not Found", - } - ) - ) - def destroy(self, request, pk=None): - """ - Delete agent - - :param request: destory parameter - :param pk: primary key - :return: none - :rtype: rest_framework.status - """ - try: - try: - if request.user.is_admin: - agent = 
Agent.objects.get(id=pk) - else: - raise CustomError("User can't delete agent!") - except ObjectDoesNotExist: - raise ResourceNotFound("Agent not found") - else: - if agent.node.count(): - raise ResourceInUse("Agent in use") - agent.delete() - - return Response(ok(None), status=status.HTTP_202_ACCEPTED) - except (ResourceNotFound, ResourceInUse) as e: - raise e - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - @swagger_auto_schema( - method="post", - request_body=AgentApplySerializer, - responses=with_common_response( - {status.HTTP_200_OK: AgentIDSerializer} - ), - ) - @action(methods=["post"], detail=False, url_path="organization") - def apply(self, request): - """ - Apply Agent - - Apply Agent - """ - try: - serializer = AgentApplySerializer(data=request.data) - if serializer.is_valid(raise_exception=True): - agent_type = serializer.validated_data.get("type") - capacity = serializer.validated_data.get("capacity") - - if request.user.organization is None: - raise CustomError(detail="Need join in organization") - agent_count = Agent.objects.filter( - organization=request.user.organization - ).count() - if agent_count > 0: - raise CustomError(detail="Already applied agent.") - - agents = Agent.objects.filter( - organization__isnull=True, - type=agent_type, - capacity__gte=capacity, - schedulable=True, - ).order_by("capacity") - if len(agents) == 0: - raise NoResource("No Agent") - - agent = agents[0] - agent.organization = request.user.organization - agent.save() - - response = AgentIDSerializer(data=agent.__dict__) - if response.is_valid(raise_exception=True): - return Response( - ok(response.validated_data), status=status.HTTP_200_OK - ) - except NoResource as e: - raise e - except Exception as e: - LOG.exception("Agent Not Applied") - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - @swagger_auto_schema( - method="delete", - responses=with_common_response( - {status.HTTP_204_NO_CONTENT: "No 
Content"} - ), - ) - @action(methods=["delete"], detail=True, url_path="organization") - def release(self, request, pk=None): - """ - Release Agent - - Release Agent - """ - try: - try: - if request.user.is_operator: - agent = Agent.objects.get(id=pk) - else: - if request.user.organization is None: - raise CustomError("Need join in organization") - agent = Agent.objects.get( - id=pk, organization=request.user.organization - ) - except ObjectDoesNotExist: - raise ResourceNotFound("Agent not found") - else: - agent.organization = None - agent.save() - - return Response(ok(None), status=status.HTTP_204_NO_CONTENT) - except ResourceNotFound as e: - raise e - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) diff --git a/src/api-engine/api/routes/chaincode/__init__.py b/src/api-engine/api/routes/chaincode/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/api-engine/api/routes/chaincode/serializers.py b/src/api-engine/api/routes/chaincode/serializers.py deleted file mode 100644 index a2ff77004..000000000 --- a/src/api-engine/api/routes/chaincode/serializers.py +++ /dev/null @@ -1,85 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from rest_framework import serializers -from api.config import FABRIC_CHAINCODE_STORE - -from api.models import ChainCode -from api.common.serializers import ListResponseSerializer -import os - - -def upload_to(instance, filename): - return "/".join([FABRIC_CHAINCODE_STORE, instance.user_name, filename]) - - -class ChainCodeIDSerializer(serializers.Serializer): - id = serializers.UUIDField(help_text="ChainCode ID") - - -class ChainCodePackageBody(serializers.Serializer): - file = serializers.FileField() - - description = serializers.CharField(max_length=128, required=False) - - def validate(self, attrs): - extension_get = self.extension_for_file(attrs["file"]) - if not extension_get: - raise serializers.ValidationError("unsupported package type") - return 
super().validate(attrs) - - @staticmethod - def extension_for_file(file): - extension = file.name.endswith(".tar.gz") - return extension - - -class ChainCodeNetworkSerializer(serializers.Serializer): - id = serializers.UUIDField(help_text="Network ID") - name = serializers.CharField(max_length=128, help_text="name of Network") - - -class ChainCodeOrgListSerializer(serializers.Serializer): - id = serializers.UUIDField(help_text="Organization ID") - name = serializers.CharField( - max_length=128, help_text="name of Organization" - ) - - -class ChainCodeResponseSerializer( - ChainCodeIDSerializer, serializers.ModelSerializer -): - id = serializers.UUIDField(help_text="ID of ChainCode") - # network = ChainCodeNetworkSerializer() - # organizations = ChainCodeOrgListSerializer(many=True) - - class Meta: - model = ChainCode - fields = ( - "id", - "package_id", - "label", - "creator", - "language", - "create_ts", - "description", - ) - - -class ChaincodeListResponse(ListResponseSerializer): - data = ChainCodeResponseSerializer(many=True, help_text="ChianCode data") - - -class ChainCodeApproveForMyOrgBody(serializers.Serializer): - channel_name = serializers.CharField(max_length=128, required=True) - chaincode_name = serializers.CharField(max_length=128, required=True) - chaincode_version = serializers.CharField(max_length=128, required=True) - sequence = serializers.IntegerField(min_value=1, required=True) - policy = serializers.CharField( - max_length=128, required=False, allow_blank=True - ) - init_flag = serializers.BooleanField(required=False) - - -class ChainCodeCommitBody(ChainCodeApproveForMyOrgBody): - pass diff --git a/src/api-engine/api/routes/chaincode/views.py b/src/api-engine/api/routes/chaincode/views.py deleted file mode 100644 index aafeef4d1..000000000 --- a/src/api-engine/api/routes/chaincode/views.py +++ /dev/null @@ -1,750 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from rest_framework import viewsets, status -from rest_framework.response 
import Response -from rest_framework.decorators import action -from rest_framework.permissions import IsAuthenticated -import os -import tempfile -import shutil -import tarfile -import json - -from drf_yasg.utils import swagger_auto_schema -from api.config import FABRIC_CHAINCODE_STORE -from api.config import CELLO_HOME -from api.models import Node, ChainCode, Channel -from api.utils.common import make_uuid -from django.core.paginator import Paginator - -from api.lib.peer.chaincode import ChainCode as PeerChainCode -from api.common.serializers import PageQuerySerializer -from api.utils.common import with_common_response, init_env_vars -from api.exceptions import ResourceNotFound - -from api.routes.chaincode.serializers import ( - ChainCodePackageBody, - ChainCodeIDSerializer, - ChainCodeCommitBody, - ChainCodeApproveForMyOrgBody, - ChaincodeListResponse, -) -from api.common import ok, err -import threading -import hashlib -import logging - - -LOG = logging.getLogger(__name__) - - -class ChainCodeViewSet(viewsets.ViewSet): - """Class represents Channel related operations.""" - - permission_classes = [ - IsAuthenticated, - ] - - def _read_cc_pkg(self, pk, filename, ccpackage_path): - """ - read and extract chaincode package meta info - :pk: chaincode id - :filename: uploaded chaincode package filename - :ccpackage_path: chaincode package path - """ - try: - meta_path = os.path.join(ccpackage_path, "metadata.json") - # extract metadata file - with tarfile.open( - os.path.join(ccpackage_path, filename) - ) as tared_file: - metadata_file = None - for member in tared_file.getmembers(): - if member.name.endswith("metadata.json"): - metadata_file = member - break - - if metadata_file is not None: - # Extract the metadata file - metadata_content = ( - tared_file.extractfile(metadata_file) - .read() - .decode("utf-8") - ) - metadata = json.loads(metadata_content) - language = metadata["type"] - label = metadata["label"] - - if os.path.exists(meta_path): - 
os.remove(meta_path) - - chaincode = ChainCode.objects.get(id=pk) - chaincode.package_id = chaincode.package_id - chaincode.language = language - chaincode.label = label - chaincode.save() - - except Exception as e: - LOG.exception("Could not read Chaincode Package") - raise e - - @swagger_auto_schema( - query_serializer=PageQuerySerializer, - responses=with_common_response( - {status.HTTP_201_CREATED: ChaincodeListResponse} - ), - ) - def list(self, request): - """ - List Chaincodes - :param request: org_id - :return: chaincode list - :rtype: list - """ - serializer = PageQuerySerializer(data=request.GET) - if serializer.is_valid(raise_exception=True): - page = serializer.validated_data.get("page") - per_page = serializer.validated_data.get("per_page") - - try: - org = request.user.organization - chaincodes = ChainCode.objects.filter( - creator=org.name - ).order_by("create_ts") - p = Paginator(chaincodes, per_page) - chaincodes_pages = p.page(page) - chanincodes_list = [ - { - "id": chaincode.id, - "package_id": chaincode.package_id, - "label": chaincode.label, - "creator": chaincode.creator, - "language": chaincode.language, - "create_ts": chaincode.create_ts, - "description": chaincode.description, - } - for chaincode in chaincodes_pages - ] - response = ChaincodeListResponse( - {"data": chanincodes_list, "total": chaincodes.count()} - ) - return Response( - data=ok(response.data), status=status.HTTP_200_OK - ) - except Exception as e: - return Response( - err(e.args), status=status.HTTP_400_BAD_REQUEST - ) - - @swagger_auto_schema( - method="post", - query_serializer=PageQuerySerializer, - responses=with_common_response( - {status.HTTP_201_CREATED: ChainCodeIDSerializer} - ), - ) - @action(detail=False, methods=["post"], url_path="chaincodeRepo") - def package(self, request): - serializer = ChainCodePackageBody(data=request.data) - if serializer.is_valid(raise_exception=True): - file = serializer.validated_data.get("file") - description = 
serializer.validated_data.get("description") - uuid = make_uuid() - try: - fd, temp_cc_path = tempfile.mkstemp() - # try to calculate packageid - with open(fd, "wb") as f: - for chunk in file.chunks(): - f.write(chunk) - - with tarfile.open(temp_cc_path, "r:gz") as tar: - # Locate the metadata file - metadata_file = None - for member in tar.getmembers(): - if member.name.endswith("metadata.json"): - metadata_file = member - break - - if metadata_file is not None: - # Extract the metadata file - metadata_content = ( - tar.extractfile(metadata_file) - .read() - .decode("utf-8") - ) - metadata = json.loads(metadata_content) - label = metadata.get("label") - else: - return Response( - err( - "Metadata file not found in the chaincode package." - ), - status=status.HTTP_400_BAD_REQUEST, - ) - - org = request.user.organization - # qs = Node.objects.filter(type="peer", organization=org) - # if not qs.exists(): - # return Response( - # err("at least 1 peer node is required for the chaincode package upload."), - # status=status.HTTP_400_BAD_REQUEST - # ) - # peer_node = qs.first() - # envs = init_env_vars(peer_node, org) - # peer_channel_cli = PeerChainCode("v2.5.10", **envs) - # return_code, content = peer_channel_cli.lifecycle_calculatepackageid(temp_cc_path) - # if (return_code != 0): - # return Response( - # err("calculate packageid failed for {}.".format(content)), - # status=status.HTTP_400_BAD_REQUEST - # ) - # packageid = content.strip() - - # manually calculate the package id - sha256_hash = hashlib.sha256() - with open(temp_cc_path, "rb") as f: - for byte_block in iter(lambda: f.read(4096), b""): - sha256_hash.update(byte_block) - packageid = label + ":" + sha256_hash.hexdigest() - - # check if packageid exists - cc = ChainCode.objects.filter(package_id=packageid) - if cc.exists(): - return Response( - err( - "package with id {} already exists.".format( - packageid - ) - ), - status=status.HTTP_400_BAD_REQUEST, - ) - - chaincode = ChainCode( - id=uuid, - 
package_id=packageid, - creator=org.name, - description=description, - ) - chaincode.save() - - # save chaincode package locally - ccpackage_path = os.path.join( - FABRIC_CHAINCODE_STORE, packageid - ) - if not os.path.exists(ccpackage_path): - os.makedirs(ccpackage_path) - ccpackage = os.path.join(ccpackage_path, file.name) - shutil.copy(temp_cc_path, ccpackage) - - # start thread to read package meta info, update db - try: - threading.Thread( - target=self._read_cc_pkg, - args=(uuid, file.name, ccpackage_path), - ).start() - except Exception as e: - LOG.exception("Failed Threading") - raise e - - return Response(ok("success"), status=status.HTTP_200_OK) - except Exception as e: - return Response( - err(e.args), status=status.HTTP_400_BAD_REQUEST - ) - finally: - os.remove(temp_cc_path) - - @swagger_auto_schema( - method="post", - responses=with_common_response( - {status.HTTP_201_CREATED: ChainCodeIDSerializer} - ), - ) - @action(detail=False, methods=["post"]) - def install(self, request): - chaincode_id = request.data.get("id") - # Get the selected node ID from request - node_id = request.data.get("node") - try: - cc_targz = "" - file_path = os.path.join(FABRIC_CHAINCODE_STORE, chaincode_id) - for _, _, files in os.walk(file_path): - cc_targz = os.path.join(file_path + "/" + files[0]) - break - - org = request.user.organization - - # If node_id is provided, get that specific node - if node_id: - try: - peer_node = Node.objects.get( - id=node_id, type="peer", organization=org - ) - except Node.DoesNotExist: - return Response( - err("Selected peer node not found or not authorized."), - status=status.HTTP_404_NOT_FOUND, - ) - else: - # Fallback to first peer if no node selected - qs = Node.objects.filter(type="peer", organization=org) - if not qs.exists(): - raise ResourceNotFound - peer_node = qs.first() - - envs = init_env_vars(peer_node, org) - peer_channel_cli = PeerChainCode(**envs) - res = peer_channel_cli.lifecycle_install(cc_targz) - if res != 0: - return 
Response( - err("install chaincode failed."), - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - return Response(ok("success"), status=status.HTTP_200_OK) - - @swagger_auto_schema( - method="get", - responses=with_common_response( - {status.HTTP_201_CREATED: ChainCodeIDSerializer} - ), - ) - @action(detail=False, methods=["get"]) - def query_installed(self, request): - try: - org = request.user.organization - qs = Node.objects.filter(type="peer", organization=org) - if not qs.exists(): - raise ResourceNotFound("Peer Does Not Exist") - peer_node = qs.first() - envs = init_env_vars(peer_node, org) - - timeout = "5s" - peer_channel_cli = PeerChainCode(**envs) - res, installed_chaincodes = ( - peer_channel_cli.lifecycle_query_installed(timeout) - ) - if res != 0: - return Response( - err("query installed chaincode failed."), - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - return Response(ok(installed_chaincodes), status=status.HTTP_200_OK) - - @swagger_auto_schema( - method="get", - responses=with_common_response( - {status.HTTP_201_CREATED: ChainCodeIDSerializer} - ), - ) - @action(detail=False, methods=["get"]) - def get_installed_package(self, request): - try: - org = request.user.organization - qs = Node.objects.filter(type="peer", organization=org) - if not qs.exists(): - raise ResourceNotFound("Peer Does Not Exist") - peer_node = qs.first() - envs = init_env_vars(peer_node, org) - - timeout = "5s" - peer_channel_cli = PeerChainCode(**envs) - res = peer_channel_cli.lifecycle_get_installed_package(timeout) - if res != 0: - return Response( - err("get installed package failed."), - status=status.HTTP_400_BAD_REQUEST, - ) - - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - return Response(ok("success"), status=status.HTTP_200_OK) - - 
@swagger_auto_schema( - method="post", - responses=with_common_response( - {status.HTTP_201_CREATED: ChainCodeIDSerializer} - ), - ) - @action(detail=False, methods=["post"]) - def approve_for_my_org(self, request): - serializer = ChainCodeApproveForMyOrgBody(data=request.data) - if serializer.is_valid(raise_exception=True): - try: - channel_name = serializer.validated_data.get("channel_name") - chaincode_name = serializer.validated_data.get( - "chaincode_name" - ) - chaincode_version = serializer.validated_data.get( - "chaincode_version" - ) - policy = serializer.validated_data.get("policy", "") - sequence = serializer.validated_data.get("sequence") - init_flag = serializer.validated_data.get("init_flag", False) - - org = request.user.organization - qs = Node.objects.filter(type="orderer", organization=org) - if not qs.exists(): - raise ResourceNotFound("Orderer Does Not Exist") - orderer_node = qs.first() - orderer_url = ( - orderer_node.name - + "." - + org.name.split(".", 1)[1] - + ":" - + str(7050) - ) - - qs = Node.objects.filter(type="peer", organization=org) - if not qs.exists(): - raise ResourceNotFound("Peer Does Not Exist") - peer_node = qs.first() - envs = init_env_vars(peer_node, org) - - peer_channel_cli = PeerChainCode(**envs) - code, content = peer_channel_cli.lifecycle_approve_for_my_org( - orderer_url, - channel_name, - chaincode_name, - chaincode_version, - sequence, - policy, - init_flag, - ) - if code != 0: - return Response( - err( - " lifecycle_approve_for_my_org failed. 
err: " - + content - ), - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - return Response( - err(e.args), status=status.HTTP_400_BAD_REQUEST - ) - return Response(ok("success"), status=status.HTTP_200_OK) - - @swagger_auto_schema( - method="get", - responses=with_common_response( - {status.HTTP_201_CREATED: ChainCodeIDSerializer} - ), - ) - @action(detail=False, methods=["get"]) - def query_approved(self, request): - try: - org = request.user.organization - qs = Node.objects.filter(type="peer", organization=org) - if not qs.exists(): - raise ResourceNotFound("Peer Does Not Exist") - peer_node = qs.first() - envs = init_env_vars(peer_node, org) - - channel_name = request.data.get("channel_name") - cc_name = request.data.get("chaincode_name") - - peer_channel_cli = PeerChainCode(**envs) - code, content = peer_channel_cli.lifecycle_query_approved( - channel_name, cc_name - ) - if code != 0: - return Response( - err("query_approved failed."), - status=status.HTTP_400_BAD_REQUEST, - ) - - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - return Response(ok(content), status=status.HTTP_200_OK) - - @swagger_auto_schema( - method="post", - responses=with_common_response( - {status.HTTP_201_CREATED: ChainCodeIDSerializer} - ), - ) - @action(detail=False, methods=["post"]) - def check_commit_readiness(self, request): - serializer = ChainCodeApproveForMyOrgBody(data=request.data) - if serializer.is_valid(raise_exception=True): - try: - channel_name = serializer.validated_data.get("channel_name") - chaincode_name = serializer.validated_data.get( - "chaincode_name" - ) - chaincode_version = serializer.validated_data.get( - "chaincode_version" - ) - policy = serializer.validated_data.get("policy") - # Perhaps the orderer's port is best stored in the database - orderer_url = serializer.validated_data.get("orderer_url") - sequence = serializer.validated_data.get("sequence") - org = request.user.organization - qs = 
Node.objects.filter(type="orderer", organization=org) - if not qs.exists(): - raise ResourceNotFound("Orderer Does Not Exist") - orderer_node = qs.first() - - orderer_tls_dir = "{}/{}/crypto-config/ordererOrganizations/{}/orderers/{}/msp/tlscacerts".format( - CELLO_HOME, - org.name, - org.name.split(".", 1)[1], - orderer_node.name + "." + org.name.split(".", 1)[1], - ) - - orderer_tls_root_cert = "" - for _, _, files in os.walk(orderer_tls_dir): - orderer_tls_root_cert = orderer_tls_dir + "/" + files[0] - break - - qs = Node.objects.filter(type="peer", organization=org) - if not qs.exists(): - raise ResourceNotFound("Peer Does Not Exist") - peer_node = qs.first() - envs = init_env_vars(peer_node, org) - - peer_channel_cli = PeerChainCode(**envs) - code, content = ( - peer_channel_cli.lifecycle_check_commit_readiness( - orderer_url, - orderer_tls_root_cert, - channel_name, - chaincode_name, - chaincode_version, - policy, - sequence, - ) - ) - if code != 0: - return Response( - err("check_commit_readiness failed."), - status=status.HTTP_400_BAD_REQUEST, - ) - - except Exception as e: - return Response( - err(e.args), status=status.HTTP_400_BAD_REQUEST - ) - return Response(ok(content), status=status.HTTP_200_OK) - - def _get_orderer_url(self, org): - qs = Node.objects.filter(type="orderer", organization=org) - if not qs.exists(): - raise ResourceNotFound("Orderer Does Not Exist") - return ( - qs.first().name + "." 
+ org.name.split(".", 1)[1] + ":" + str(7050) - ) - - def _get_peer_channel_cli(self, org): - qs = Node.objects.filter(type="peer", organization=org) - if not qs.exists(): - raise ResourceNotFound("Peer Does Not Exist") - envs = init_env_vars(qs.first(), org) - return PeerChainCode(**envs) - - def _get_approved_organizations_by_channel_and_chaincode( - self, - peer_channel_cli, - channel_name, - chaincode_name, - chaincode_version, - sequence, - ): - code, readiness_result = ( - peer_channel_cli.lifecycle_check_commit_readiness( - channel_name, chaincode_name, chaincode_version, sequence - ) - ) - if code != 0: - raise Exception( - f"Check commit readiness failed: {readiness_result}" - ) - - # Check approved status - approvals = readiness_result.get("approvals", {}) - approved_msps = [ - org_msp for org_msp, approved in approvals.items() if approved - ] - if not approved_msps: - raise Exception("No organizations have approved this chaincode") - - LOG.info(f"Approved organizations: {approved_msps}") - - try: - channel = Channel.objects.get(name=channel_name) - channel_orgs = channel.organizations.all() - except Channel.DoesNotExist: - raise Exception(f"Channel {channel_name} not found") - - # find the corresponding organization by MSP ID - # MSP ID format: Org1MSP, Org2MSP -> organization name format: org1.xxx, org2.xxx - approved_orgs = [] - for msp_id in approved_msps: - if msp_id.endswith("MSP"): - org_prefix = msp_id[ - :-3 - ].lower() # remove "MSP" and convert to lowercase - # find the corresponding organization in the channel - for channel_org in channel_orgs: - if channel_org.name.split(".")[0] == org_prefix: - approved_orgs.append(channel_org) - LOG.info( - f"Found approved organization: {channel_org.name} (MSP: {msp_id})" - ) - break - - if not approved_orgs: - raise Exception("No approved organizations found in this channel") - return approved_orgs - - def _get_peer_addresses_and_certs_by_organizations(self, orgs): - addresses = [] - certs = [] - for org 
in orgs: - qs = Node.objects.filter(type="peer", organization=org) - if not qs.exists(): - LOG.warning( - f"No peer nodes found for organization: {org.name}" - ) - continue - - # select the first peer node for each organization - peer = qs.first() - peer_tls_cert = "{}/{}/crypto-config/peerOrganizations/{}/peers/{}/tls/ca.crt".format( - CELLO_HOME, org.name, org.name, peer.name + "." + org.name - ) - peer_address = peer.name + "." + org.name + ":" + str(7051) - LOG.info(f"Added peer from org {org.name}: {peer_address}") - - addresses.append(peer_address) - certs.append(peer_tls_cert) - - if not addresses: - raise Exception("No peer nodes found for specified organizations") - return addresses, certs - - @swagger_auto_schema( - method="post", - responses=with_common_response( - {status.HTTP_201_CREATED: ChainCodeIDSerializer} - ), - ) - @action(detail=False, methods=["post"]) - def commit(self, request): - serializer = ChainCodeCommitBody(data=request.data) - if serializer.is_valid(raise_exception=True): - try: - channel_name = serializer.validated_data.get("channel_name") - chaincode_name = serializer.validated_data.get( - "chaincode_name" - ) - chaincode_version = serializer.validated_data.get( - "chaincode_version" - ) - policy = serializer.validated_data.get("policy") - sequence = serializer.validated_data.get("sequence") - init_flag = serializer.validated_data.get("init_flag", False) - org = request.user.organization - - orderer_url = self._get_orderer_url(org) - - # Step 1: Check commit readiness, find all approved organizations - peer_channel_cli = self._get_peer_channel_cli(org) - approved_organizations = ( - self._get_approved_organizations_by_channel_and_chaincode( - peer_channel_cli, - channel_name, - chaincode_name, - chaincode_version, - sequence, - ) - ) - - # Step 2: Get peer nodes and root certs - peer_address_list, peer_root_certs = ( - self._get_peer_addresses_and_certs_by_organizations( - approved_organizations - ) - ) - - # Step 3: Commit 
chaincode - code = peer_channel_cli.lifecycle_commit( - orderer_url, - channel_name, - chaincode_name, - chaincode_version, - sequence, - policy, - peer_address_list, - peer_root_certs, - init_flag, - ) - if code != 0: - return Response( - err("Commit chaincode failed"), - status=status.HTTP_400_BAD_REQUEST, - ) - - LOG.info(f"Chaincode {chaincode_name} committed successfully") - - # Step 4: Query committed chaincode - code, committed_result = ( - peer_channel_cli.lifecycle_query_committed( - channel_name, chaincode_name - ) - ) - if code == 0: - LOG.info(committed_result) - return Response( - ok(committed_result), status=status.HTTP_200_OK - ) - else: - return Response( - err("Query committed failed."), - status=status.HTTP_400_BAD_REQUEST, - ) - - except Exception as e: - LOG.error(f"Commit chaincode failed: {str(e)}") - return Response( - err(f"Commit chaincode failed: {str(e)}"), - status=status.HTTP_400_BAD_REQUEST, - ) - - @swagger_auto_schema( - method="get", - responses=with_common_response( - {status.HTTP_201_CREATED: ChainCodeIDSerializer} - ), - ) - @action(detail=False, methods=["get"]) - def query_committed(self, request): - try: - channel_name = request.data.get("channel_name") - chaincode_name = request.data.get("chaincode_name") - org = request.user.organization - qs = Node.objects.filter(type="peer", organization=org) - if not qs.exists(): - raise ResourceNotFound("Peer Does Not Exist") - peer_node = qs.first() - envs = init_env_vars(peer_node, org) - peer_channel_cli = PeerChainCode(**envs) - code, chaincodes_commited = ( - peer_channel_cli.lifecycle_query_committed( - channel_name, chaincode_name - ) - ) - if code != 0: - return Response( - err("query committed failed."), - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - LOG.exception("Could Not Commit Query") - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - return Response(ok(chaincodes_commited), status=status.HTTP_200_OK) diff --git 
a/src/api-engine/api/routes/channel/__init__.py b/src/api-engine/api/routes/channel/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/api-engine/api/routes/channel/serializers.py b/src/api-engine/api/routes/channel/serializers.py deleted file mode 100644 index e4a0d2b82..000000000 --- a/src/api-engine/api/routes/channel/serializers.py +++ /dev/null @@ -1,73 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from rest_framework import serializers - -from api.models import Channel -from api.common.serializers import ListResponseSerializer - - -class ChannelCreateBody(serializers.Serializer): - name = serializers.CharField(max_length=128, required=True) - peers = serializers.ListField( - child=serializers.UUIDField(help_text="ID of Peer Nodes") - ) - orderers = serializers.ListField( - child=serializers.UUIDField(help_text="ID of Orderer Nodes") - ) - - def validate(self, attrs): - if len(attrs["peers"]) < 1: - raise serializers.ValidationError("Invalid peers") - if len(attrs["orderers"]) < 1: - raise serializers.ValidationError("Invalid orderers") - - return super().validate(attrs) - - -class ChannelIDSerializer(serializers.Serializer): - id = serializers.UUIDField(help_text="Channel ID") - - -ORG_CHOICES = ( - ("Application", "Application"), - ("Orderer", "Orderer"), -) - - -class ChannelUpdateSerializer(serializers.Serializer): - msp_id = serializers.CharField( - max_length=128, help_text="MSP ID of Organization" - ) - data = serializers.FileField(help_text="Channel config file") - org_type = serializers.ChoiceField( - help_text="Organization type", choices=ORG_CHOICES - ) - - -class ChannelOrgListSerializer(serializers.Serializer): - id = serializers.UUIDField(help_text="Organization ID") - name = serializers.CharField( - max_length=128, help_text="name of Organization" - ) - - -class ChannelNetworkSerializer(serializers.Serializer): - id = serializers.UUIDField(help_text="Network ID") - name = 
serializers.CharField(max_length=128, help_text="name of Network") - - -class ChannelResponseSerializer( - ChannelIDSerializer, serializers.ModelSerializer -): - id = serializers.UUIDField(help_text="ID of Channel") - network = ChannelNetworkSerializer() - organizations = ChannelOrgListSerializer(many=True) - - class Meta: - model = Channel - fields = ("id", "name", "network", "organizations", "create_ts") - - -class ChannelListResponse(ListResponseSerializer): - data = ChannelResponseSerializer(many=True, help_text="Channel data") diff --git a/src/api-engine/api/routes/channel/views.py b/src/api-engine/api/routes/channel/views.py deleted file mode 100644 index 6729c4805..000000000 --- a/src/api-engine/api/routes/channel/views.py +++ /dev/null @@ -1,613 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from copy import deepcopy -import logging -import json - -from rest_framework import viewsets, status -from rest_framework.decorators import action -from rest_framework.response import Response -from rest_framework.parsers import MultiPartParser, FormParser, JSONParser -from rest_framework.permissions import IsAuthenticated - -from drf_yasg.utils import swagger_auto_schema - -from django.core.exceptions import ObjectDoesNotExist -from django.core.paginator import Paginator - -from api.config import CELLO_HOME -from api.common.serializers import PageQuerySerializer -from api.utils.common import ( - with_common_response, - parse_block_file, - to_dict, - json_filter, - json_add_anchor_peer, - json_create_envelope, - init_env_vars, -) -from api.lib.configtxgen import ConfigTX, ConfigTxGen -from api.lib.peer.channel import Channel as PeerChannel -from api.lib.configtxlator.configtxlator import ConfigTxLator -from api.exceptions import ResourceNotFound, NoResource -from api.models import ( - Channel, - Node, - Organization, -) -from api.routes.channel.serializers import ( - ChannelCreateBody, - ChannelIDSerializer, - ChannelListResponse, - ChannelResponseSerializer, - 
ChannelUpdateSerializer, -) - -from api.common import ok, err -from api.common.enums import ( - NodeStatus, - FabricNodeType, -) - -LOG = logging.getLogger(__name__) - -CFG_JSON = "cfg.json" -CFG_PB = "cfg.pb" -DELTA_PB = "delta.pb" -DELTA_JSON = "delta.json" -UPDATED_CFG_JSON = "update_cfg.json" -UPDATED_CFG_PB = "update_cfg.pb" -CFG_DELTA_ENV_JSON = "cfg_delta_env.json" -CFG_DELTA_ENV_PB = "cfg_delta_env.pb" - - -class ChannelViewSet(viewsets.ViewSet): - """Class represents Channel related operations.""" - - permission_classes = [ - IsAuthenticated, - ] - parser_classes = [MultiPartParser, FormParser, JSONParser] - - @swagger_auto_schema( - query_serializer=PageQuerySerializer, - responses=with_common_response( - {status.HTTP_201_CREATED: ChannelListResponse} - ), - ) - def list(self, request): - """ - List Channels - :param request: org_id - :return: channel list - :rtype: list - """ - serializer = PageQuerySerializer(data=request.GET) - if serializer.is_valid(raise_exception=True): - page = serializer.validated_data.get("page") - per_page = serializer.validated_data.get("per_page") - - try: - org = request.user.organization - channels = Channel.objects.filter(organizations=org).order_by( - "create_ts" - ) - p = Paginator(channels, per_page) - channels_pages = p.page(page) - channels_list = [ - { - "id": channel.id, - "name": channel.name, - "network": channel.network.__dict__, - "organizations": [ - {"id": org.id, "name": org.name} - for org in channel.organizations.all() - ], - "create_ts": channel.create_ts, - } - for channel in channels_pages - ] - response = ChannelListResponse( - {"data": channels_list, "total": channels.count()} - ) - return Response( - data=ok(response.data), status=status.HTTP_200_OK - ) - except Exception as e: - return Response( - err(e.args), status=status.HTTP_400_BAD_REQUEST - ) - - @swagger_auto_schema( - request_body=ChannelCreateBody, - responses=with_common_response( - {status.HTTP_201_CREATED: ChannelIDSerializer} - ), - ) - 
def create(self, request): - """ - Create Channel - :param request: create parameter - :return: Channel ID - :rtype: uuid - """ - - serializer = ChannelCreateBody(data=request.data) - if serializer.is_valid(raise_exception=True): - name = serializer.validated_data.get("name") - peers = serializer.validated_data.get("peers") - orderers = serializer.validated_data.get("orderers") - - try: - org = request.user.organization - orderer_nodes = Node.objects.filter(id__in=orderers) - peer_nodes = Node.objects.filter(id__in=peers) - - # validate if all nodes are running - validate_nodes(orderer_nodes) - validate_nodes(peer_nodes) - - # assemble transaction config - _orderers, _peers = assemble_transaction_config(org) - - ConfigTX(org.network.name).create( - name, org.network.consensus, _orderers, _peers - ) - ConfigTxGen(org.network.name).genesis( - profile=name, - channelid=name, - outputblock="{}.block".format(name), - ) - - # osnadmin channel join - ordering_node = Node.objects.get(id=orderers[0]) - osn_channel_join(name, ordering_node, org) - - # peer channel join - peer_channel_join(name, peers, org) - - # set anchor peer - anchor_peer = Node.objects.get(id=peers[0]) - set_anchor_peer(name, org, anchor_peer, ordering_node) - - # save channel to db - channel = Channel(name=name, network=org.network) - channel.save() - channel.organizations.add(org) - channel.orderers.add(ordering_node) - - # serialize and return channel id - response = ChannelIDSerializer(data=channel.__dict__) - if response.is_valid(raise_exception=True): - return Response( - ok(response.validated_data), - status=status.HTTP_201_CREATED, - ) - except Exception as e: - return Response( - err(e.args), status=status.HTTP_400_BAD_REQUEST - ) - - @swagger_auto_schema( - responses=with_common_response( - {status.HTTP_200_OK: ChannelResponseSerializer} - ), - ) - def retrieve(self, request, pk=None): - """ - Retrieve channel - :param request: retrieve parameter - :param pk: primary key - :return: none - 
:rtype: rest_framework.status - """ - try: - channel = Channel.objects.get(id=pk) - response = ChannelResponseSerializer(instance=channel) - return Response(ok(response.data), status=status.HTTP_200_OK) - - except ObjectDoesNotExist: - LOG.exception("channel not found") - raise ResourceNotFound - - @swagger_auto_schema( - request_body=ChannelUpdateSerializer, - responses=with_common_response({status.HTTP_202_ACCEPTED: "Accepted"}), - ) - def update(self, request, pk=None): - """ - Update channel - :param request: update parameters - :param pk: primary key - :return: none - :rtype: rest_framework.status - """ - serializer = ChannelUpdateSerializer(data=request.data) - if serializer.is_valid(raise_exception=True): - channel = Channel.objects.get(id=pk) - org = request.user.organization - try: - # Read uploaded file in cache without saving it on disk. - file = request.FILES.get("data").read() - json_data = file.decode("utf8").replace("'", '"') - data = json.loads(json_data) - msp_id = serializer.validated_data.get("msp_id") - org_type = serializer.validated_data.get("org_type") - # Validate uploaded config file - try: - config = data["config"]["channel_group"]["groups"][ - org_type - ]["groups"][msp_id] - except KeyError: - LOG.exception("config file not found") - raise ResourceNotFound - - try: - # Read current channel config from local disk - with open( - channel.get_channel_artifacts_path(CFG_JSON), - "r", - encoding="utf-8", - ) as f: - LOG.info("load current config success") - current_config = json.load(f) - except FileNotFoundError: - LOG.exception("current config file not found") - raise ResourceNotFound - - # Create a new org - new_org = Organization.objects.create( - name=org.name, - ) - LOG.info("new org created") - updated_config = deepcopy(current_config) - updated_config["channel_group"]["groups"]["Application"][ - "groups" - ][msp_id] = config - LOG.info("update config success", updated_config) - - # Update and save the config with new org - with open( - 
channel.get_channel_artifacts_path(UPDATED_CFG_JSON), - "w", - encoding="utf-8", - ) as f: - LOG.info("save updated config success") - json.dump(updated_config, f, sort_keys=False) - - # Encode it into pb. - ConfigTxLator().proto_encode( - input=channel.get_channel_artifacts_path(UPDATED_CFG_JSON), - type="common.Config", - output=channel.get_channel_artifacts_path(UPDATED_CFG_PB), - ) - LOG.info("encode config to pb success") - - # Calculate the config delta between pb files - ConfigTxLator().compute_update( - original=channel.get_channel_artifacts_path(CFG_PB), - updated=channel.get_channel_artifacts_path(UPDATED_CFG_PB), - channel_id=channel.name, - output=channel.get_channel_artifacts_path(DELTA_PB), - ) - LOG.info("compute config delta success") - # Decode the config delta pb into json - config_update = ConfigTxLator().proto_decode( - input=channel.get_channel_artifacts_path(DELTA_PB), - type="common.ConfigUpdate", - ) - LOG.info("decode config delta to json success") - # Wrap the config update as envelope - updated_config = { - "payload": { - "header": { - "channel_header": { - "channel_id": channel.name, - "type": 2, - } - }, - "data": {"config_update": to_dict(config_update)}, - } - } - with open( - channel.get_channel_artifacts_path(CFG_JSON), - "w", - encoding="utf-8", - ) as f: - LOG.info("save config to json success") - json.dump(updated_config, f, sort_keys=False) - - # Encode the config update envelope into pb - ConfigTxLator().proto_encode( - input=channel.get_channel_artifacts_path(CFG_JSON), - type="common.Envelope", - output=channel.get_channel_artifacts_path( - CFG_DELTA_ENV_PB - ), - ) - LOG.info("Encode the config update envelope success") - - # Peers to send the update transaction - nodes = Node.objects.filter( - organization=org, - type=FabricNodeType.Peer.name.lower(), - status=NodeStatus.Running.name.lower(), - ) - - for node in nodes: - dir_node = "{}/{}/crypto-config/peerOrganizations".format( - CELLO_HOME, org.name - ) - env = { - 
"FABRIC_CFG_PATH": "{}/{}/peers/{}/".format( - dir_node, org.name, node.name + "." + org.name - ), - } - cli = PeerChannel(**env) - cli.signconfigtx( - channel.get_channel_artifacts_path(CFG_DELTA_ENV_PB) - ) - LOG.info("Peers to send the update transaction success") - - # Save a new organization to db. - new_org.save() - LOG.info("new_org save success") - return Response(ok(None), status=status.HTTP_202_ACCEPTED) - except ObjectDoesNotExist: - LOG.exception("channel not found") - raise ResourceNotFound - - @swagger_auto_schema( - responses=with_common_response({status.HTTP_200_OK: "Accepted"}), - ) - @action(methods=["get"], detail=True, url_path="configs") - def get_channel_org_config(self, request, pk=None): - try: - org = request.user.organization - channel = Channel.objects.get(id=pk) - peer = Node.objects.filter( - organization=org, - type=FabricNodeType.Peer.name.lower(), - status=NodeStatus.Running.name.lower(), - ).first() - orderer = Node.objects.filter( - organization=org, - type=FabricNodeType.Orderer.name.lower(), - status=NodeStatus.Running.name.lower() - ).first() - - peer_channel_fetch(channel.name, org, peer, orderer) - - # Decode block to JSON - ConfigTxLator().proto_decode( - input=channel.get_channel_artifacts_path("config_block.pb"), - type="common.Block", - output=channel.get_channel_artifacts_path("config_block.json"), - ) - - # Get the config data from the block - json_filter( - input=channel.get_channel_artifacts_path("config_block.json"), - output=channel.get_channel_artifacts_path("config.json"), - expression=".data.data[0].payload.data.config" - ) - - # Prepare return data - with open(channel.get_channel_artifacts_path("config.json"), 'r', encoding='utf-8') as f: - data = { - "config": json.load(f), - "organization": org.name, - # TODO: create a method on Organization or Node to return msp_id - "msp_id": '{}'.format(org.name.split(".")[0].capitalize()) - } - return Response(data=data, status=status.HTTP_200_OK) - except 
ObjectDoesNotExist: - LOG.exception("channel org not found") - raise ResourceNotFound - - -def validate_nodes(nodes): - """ - validate if all nodes are running - :param nodes: list of nodes - :return: none - """ - for node in nodes: - if node.status != NodeStatus.Running.name.lower(): - raise NoResource("Node {} is not running".format(node.name)) - - -def assemble_transaction_config(org): - """ - Assemble transaction config for the channel. - :param org: Organization object. - :return: _orderers, _peers - """ - _orderers = [{"name": org.name, "hosts": []}] - _peers = [{"name": org.name, "hosts": []}] - nodes = Node.objects.filter(organization=org) - for node in nodes: - if node.type == "peer": - _peers[0]["hosts"].append({"name": node.name}) - elif node.type == "orderer": - _orderers[0]["hosts"].append({"name": node.name}) - - return _orderers, _peers - - -def osn_channel_join(name, ordering_node, org): - """ - Join ordering node to the channel. - :param ordering_node: Node object - :param org: Organization object. - :param channel_name: Name of the channel. - :return: none - """ - envs = init_env_vars(ordering_node, org) - peer_channel_cli = PeerChannel(**envs) - peer_channel_cli.create( - channel=name, - orderer_admin_url="{}.{}:{}".format( - ordering_node.name, org.name.split(".", 1)[1], str(7053) - ), - block_path="{}/{}/{}.block".format(CELLO_HOME, org.network.name, name), - ) - - -def peer_channel_join(name, peers, org): - """ - Join peer nodes to the channel. - :param peers: list of Node objects - :param org: Organization object. - :param channel_name: Name of the channel. - :return: none - """ - for i in range(len(peers)): - peer_node = Node.objects.get(id=peers[i]) - envs = init_env_vars(peer_node, org) - peer_channel_cli = PeerChannel(**envs) - peer_channel_cli.join( - block_path="{}/{}/{}.block".format( - CELLO_HOME, org.network.name, name - ) - ) - - -def set_anchor_peer(name, org, anchor_peer, ordering_node): - """ - Set anchor peer for the channel. 
- :param org: Organization object. - :param anchor_peer: Anchor peer node - :param ordering_node: Orderer node - :return: none - """ - org_msp = "{}".format(org.name.split(".", 1)[0].capitalize()) - channel_artifacts_path = "{}/{}".format(CELLO_HOME, org.network.name) - - # Fetch the channel block from the orderer - peer_channel_fetch(name, org, anchor_peer, ordering_node) - - # Decode block to JSON - ConfigTxLator().proto_decode( - input="{}/config_block.pb".format(channel_artifacts_path), - type="common.Block", - output="{}/config_block.json".format(channel_artifacts_path), - ) - - # Get the config data from the block - json_filter( - input="{}/config_block.json".format(channel_artifacts_path), - output="{}/config.json".format(channel_artifacts_path), - expression=".data.data[0].payload.data.config", - ) - - # add anchor peer config - anchor_peer_config = { - "AnchorPeers": { - "mod_policy": "Admins", - "value": { - "anchor_peers": [ - {"host": anchor_peer.name + "." + org.name, "port": 7051} - ] - }, - "version": 0, - } - } - - json_add_anchor_peer( - input="{}/config.json".format(channel_artifacts_path), - output="{}/modified_config.json".format(channel_artifacts_path), - anchor_peer_config=anchor_peer_config, - org_msp=org_msp, - ) - - ConfigTxLator().proto_encode( - input="{}/config.json".format(channel_artifacts_path), - type="common.Config", - output="{}/config.pb".format(channel_artifacts_path), - ) - - ConfigTxLator().proto_encode( - input="{}/modified_config.json".format(channel_artifacts_path), - type="common.Config", - output="{}/modified_config.pb".format(channel_artifacts_path), - ) - - ConfigTxLator().compute_update( - original="{}/config.pb".format(channel_artifacts_path), - updated="{}/modified_config.pb".format(channel_artifacts_path), - channel_id=name, - output="{}/config_update.pb".format(channel_artifacts_path), - ) - - ConfigTxLator().proto_decode( - input="{}/config_update.pb".format(channel_artifacts_path), - type="common.ConfigUpdate", - 
output="{}/config_update.json".format(channel_artifacts_path), - ) - - # Create config update envelope - json_create_envelope( - input="{}/config_update.json".format(channel_artifacts_path), - output="{}/config_update_in_envelope.json".format( - channel_artifacts_path - ), - channel=name, - ) - - ConfigTxLator().proto_encode( - input="{}/config_update_in_envelope.json".format( - channel_artifacts_path - ), - type="common.Envelope", - output="{}/config_update_in_envelope.pb".format( - channel_artifacts_path - ), - ) - - # Update the channel of anchor peer - peer_channel_update( - name, org, anchor_peer, ordering_node, channel_artifacts_path - ) - - -def peer_channel_fetch(name, org, anchor_peer, ordering_node): - """ - Fetch the channel block from the orderer. - :param anchor_peer: Anchor peer node - :param org: Organization object. - :param channel_name: Name of the channel. - :return: none - """ - PeerChannel(**{**init_env_vars(ordering_node, org), **init_env_vars(anchor_peer, org)}).fetch( - block_path="{}/{}/config_block.pb".format(CELLO_HOME, org.network.name), - channel=name, orderer_general_url="{}.{}:{}".format( - ordering_node.name, - org.name.split(".", 1)[1], - str(7050) - ) - ) - - -def peer_channel_update( - name, org, anchor_peer, ordering_node, channel_artifacts_path -): - """ - Update the channel. - :param anchor_peer: Anchor peer node - :param org: Organization object. - :param channel_name: Name of the channel. 
- :return: none - """ - envs = init_env_vars(anchor_peer, org) - peer_channel_cli = PeerChannel(**envs) - peer_channel_cli.update( - channel=name, - channel_tx="{}/config_update_in_envelope.pb".format( - channel_artifacts_path - ), - orderer_url="{}.{}:{}".format( - ordering_node.name, org.name.split(".", 1)[1], str(7050) - ), - ) diff --git a/src/api-engine/api/routes/cluster/__init__.py b/src/api-engine/api/routes/cluster/__init__.py deleted file mode 100644 index 0480730a5..000000000 --- a/src/api-engine/api/routes/cluster/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# diff --git a/src/api-engine/api/routes/cluster/serializers.py b/src/api-engine/api/routes/cluster/serializers.py deleted file mode 100644 index 4654a8afd..000000000 --- a/src/api-engine/api/routes/cluster/serializers.py +++ /dev/null @@ -1,82 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from rest_framework import serializers - -from api.common.enums import NetworkType, ConsensusPlugin, Operation -from api.common.serializers import PageQuerySerializer - -NAME_MIN_LEN = 4 -NAME_MAX_LEN = 36 -NAME_HELP_TEXT = "Name of Cluster" -SIZE_MAX_VALUE = 6 -SIZE_MIN_VALUE = 2 - - -class ClusterQuery(PageQuerySerializer): - consensus_plugin = serializers.ChoiceField( - required=False, - allow_null=True, - help_text=ConsensusPlugin.get_info("Consensus Plugin:", list_str=True), - choices=ConsensusPlugin.to_choices(True), - ) - name = serializers.CharField( - required=False, - allow_null=True, - min_length=NAME_MIN_LEN, - max_length=NAME_MAX_LEN, - help_text=NAME_HELP_TEXT, - ) - host_id = serializers.CharField( - help_text="Host ID", required=False, allow_null=True - ) - network_type = serializers.ChoiceField( - required=False, - allow_null=True, - help_text=NetworkType.get_info("Network Types:", list_str=True), - choices=NetworkType.to_choices(), - ) - size = serializers.IntegerField( - required=False, - allow_null=True, - min_value=SIZE_MIN_VALUE, - 
max_value=SIZE_MAX_VALUE, - help_text="Size of cluster", - ) - - -class ClusterIDSerializer(serializers.Serializer): - id = serializers.CharField(help_text="ID of cluster") - - -class ClusterCreateBody(serializers.Serializer): - name = serializers.CharField( - min_length=NAME_MIN_LEN, - max_length=NAME_MAX_LEN, - help_text=NAME_HELP_TEXT, - ) - host_id = serializers.CharField(help_text="Host ID") - network_type = serializers.ChoiceField( - help_text=NetworkType.get_info("Network Types:", list_str=True), - choices=NetworkType.to_choices(), - ) - size = serializers.IntegerField( - min_value=SIZE_MIN_VALUE, - max_value=SIZE_MAX_VALUE, - help_text="Size of cluster", - ) - consensus_plugin = serializers.ChoiceField( - help_text=ConsensusPlugin.get_info("Consensus Plugin:", list_str=True), - choices=ConsensusPlugin.to_choices(True), - ) - - -class ClusterResponse(serializers.Serializer): - name = serializers.CharField() - - -class ClusterOperationSerializer(serializers.Serializer): - action = serializers.ChoiceField( - help_text=Operation.get_info("Operation for cluster:", list_str=True), - choices=Operation.to_choices(True), - ) diff --git a/src/api-engine/api/routes/cluster/views.py b/src/api-engine/api/routes/cluster/views.py deleted file mode 100644 index dde3f5804..000000000 --- a/src/api-engine/api/routes/cluster/views.py +++ /dev/null @@ -1,76 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import logging - -from rest_framework import viewsets, status -from rest_framework.decorators import action -from rest_framework.response import Response -from drf_yasg.utils import swagger_auto_schema -from api.routes.network.serializers import NetworkListResponse -from api.utils.common import with_common_response -from api.routes.cluster.serializers import ( - ClusterQuery, - ClusterCreateBody, - ClusterIDSerializer, - ClusterOperationSerializer, -) - -LOG = logging.getLogger(__name__) - - -class ClusterViewSet(viewsets.ViewSet): - @swagger_auto_schema( - 
query_serializer=ClusterQuery, - responses=with_common_response( - with_common_response({status.HTTP_200_OK: NetworkListResponse}) - ), - ) - def list(self, request, *args, **kwargs): - """ - List Clusters - - Filter clusters with query parameters. - """ - return Response(data=[], status=status.HTTP_200_OK) - - @swagger_auto_schema( - request_body=ClusterCreateBody, - responses=with_common_response( - {status.HTTP_201_CREATED: ClusterIDSerializer} - ), - ) - def create(self, request): - """ - Create Cluster - - Create new cluster - """ - pass - - @swagger_auto_schema( - responses=with_common_response( - {status.HTTP_204_NO_CONTENT: "No Content"} - ) - ) - def destroy(self, request, pk=None): - """ - Delete Cluster - - Delete cluster - """ - pass - - @swagger_auto_schema( - methods=["post"], - query_serializer=ClusterOperationSerializer, - responses=with_common_response({status.HTTP_202_ACCEPTED: "Accepted"}), - ) - @action(methods=["post"], detail=True, url_path="operations") - def operate(self, request, pk=None): - """ - Operate Cluster - - Operate cluster start/stop/restart - """ - pass diff --git a/src/api-engine/api/routes/file/__init__.py b/src/api-engine/api/routes/file/__init__.py deleted file mode 100644 index 792d60054..000000000 --- a/src/api-engine/api/routes/file/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# diff --git a/src/api-engine/api/routes/file/serializers.py b/src/api-engine/api/routes/file/serializers.py deleted file mode 100644 index f33b0ddde..000000000 --- a/src/api-engine/api/routes/file/serializers.py +++ /dev/null @@ -1,79 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import logging - -from django.core.validators import FileExtensionValidator -from rest_framework import serializers - -from api.common.serializers import PageQuerySerializer, ListResponseSerializer -from api.models import File, validate_file, random_name -from api.utils.common import to_form_paras - -LOG = logging.getLogger(__name__) - - -class 
FileQuerySerializer(PageQuerySerializer, serializers.ModelSerializer): - class Meta: - model = File - fields = ("name", "type", "page", "per_page", "organization") - - -class FileIDSerializer(serializers.ModelSerializer): - class Meta: - model = File - fields = ("id",) - extra_kwargs = {"id": {"validators": [], "read_only": False}} - - -class FileInfoSerializer(serializers.ModelSerializer): - url = serializers.URLField(help_text="File url for download") - organization = serializers.UUIDField( - required=True, read_only=False, help_text="Organization of file" - ) - - class Meta: - model = File - fields = ("id", "name", "type", "organization", "url", "created_at") - extra_kwargs = { - "id": {"validators": [], "read_only": False, "required": True}, - "name": {"required": True}, - "type": {"required": True}, - "url": {"required": True}, - "created_at": {"required": True}, - } - - -class FileListSerializer(ListResponseSerializer): - data = FileInfoSerializer(many=True, help_text="Files list data") - - -class FileCreateSerializer(serializers.ModelSerializer): - def to_form_paras(self): - custom_paras = to_form_paras(self) - - return custom_paras - - class Meta: - model = File - fields = ("name", "type", "file") - extra_kwargs = { - "type": {"required": True}, - "file": { - "required": True, - "validators": [ - FileExtensionValidator(allowed_extensions=["zip"]), - validate_file, - ], - }, - } - - def validate(self, attrs): - file_type = attrs.get("type") - name = attrs.get("name") - - if name is None: - name = random_name(file_type) - attrs["name"] = name - - return attrs diff --git a/src/api-engine/api/routes/file/views.py b/src/api-engine/api/routes/file/views.py deleted file mode 100644 index 8df4afb5f..000000000 --- a/src/api-engine/api/routes/file/views.py +++ /dev/null @@ -1,120 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -import logging - -from django.core.paginator import Paginator -from drf_yasg.utils import swagger_auto_schema -from django.core.exceptions 
import PermissionDenied -from rest_framework import viewsets, status -from rest_framework.permissions import IsAuthenticated -from rest_framework.response import Response - -from api.auth import IsOperatorAuthenticated, IsAdminAuthenticated -from api.exceptions import CustomError -from api.routes.file.serializers import ( - FileQuerySerializer, - FileListSerializer, - FileIDSerializer, - FileCreateSerializer, -) -from api.utils.common import any_of, with_common_response -from api.models import File - -LOG = logging.getLogger(__name__) - - -class FileViewSet(viewsets.ViewSet): - permission_classes = ( - IsAuthenticated, - any_of(IsAdminAuthenticated, IsOperatorAuthenticated), - ) - - @staticmethod - def _validate_organization(request): - if not request.user.is_operator and request.user.organization is None: - raise CustomError(detail="Need join in organization.") - - @swagger_auto_schema( - query_serializer=FileQuerySerializer, - responses=with_common_response( - {status.HTTP_200_OK: FileListSerializer} - ), - ) - def list(self, request): - """ - List Files - - Filter files with query parameters, - """ - serializer = FileQuerySerializer(data=request.GET) - if serializer.is_valid(raise_exception=True): - name = serializer.validated_data.get("name") - file_type = serializer.validated_data.get("type") - organization = serializer.validated_data.get("organization") - page = serializer.validated_data.get("page") - per_page = serializer.validated_data.get("per_page") - - if organization is not None and not request.user.is_operator: - raise PermissionDenied("Non-Operator Request Denied") - query_filter = {} - if name: - query_filter.update({"name__icontains": name}) - if file_type: - query_filter.update({"type": file_type}) - if organization: - query_filter.update({"organization": organization}) - - files = File.objects.filter(**query_filter) - p = Paginator(files, per_page) - files = p.page(page) - files = [ - { - "id": str(file.id), - "name": file.name, - "type": 
file.type, - "url": request.build_absolute_uri(file.file.url), - "organization": file.organization.id, - } - for file in files - ] - - response = FileListSerializer( - data={"data": files, "total": p.count} - ) - if response.is_valid(raise_exception=True): - return Response( - response.validated_data, status=status.HTTP_200_OK - ) - - @swagger_auto_schema( - request_body=FileCreateSerializer, - responses=with_common_response( - {status.HTTP_201_CREATED: FileIDSerializer} - ), - ) - def create(self, request): - """ - Create new file - - Create new file - """ - serializer = FileCreateSerializer(data=request.data) - if serializer.is_valid(raise_exception=True): - name = serializer.validated_data.get("name") - file_type = serializer.validated_data.get("type") - file = serializer.validated_data.get("file") - - self._validate_organization(request) - - file = File( - name=name, - type=file_type, - file=file, - organization=request.user.organization, - ) - file.save() - response = FileIDSerializer(data=file.__dict__) - if response.is_valid(raise_exception=True): - return Response( - response.validated_data, status=status.HTTP_201_CREATED - ) diff --git a/src/api-engine/api/routes/general/__init__.py b/src/api-engine/api/routes/general/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/api-engine/api/routes/general/serializers.py b/src/api-engine/api/routes/general/serializers.py deleted file mode 100644 index af9fcd6ce..000000000 --- a/src/api-engine/api/routes/general/serializers.py +++ /dev/null @@ -1,39 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 - -from api.routes.user.serializers import UserInfoSerializer -from rest_framework import serializers - - -class RegisterBody(serializers.Serializer): - orgName = serializers.CharField(help_text="name of Organization") - email = serializers.EmailField(help_text="email of user") - username = serializers.CharField( - help_text="name of Administrator", default="Administator" - ) - password = 
serializers.CharField( - help_text="password of Administrator", default="666666" - ) - - -class RegisterIDSerializer(serializers.Serializer): - id = serializers.UUIDField(help_text="ID of Organization") - - -class RegisterResponse(serializers.Serializer): - id = serializers.UUIDField(help_text="ID of Organization") - # msg = serializers.CharField(help_text="name of Organization") - - -class LoginBody(serializers.Serializer): - email = serializers.CharField(help_text="email of user") - password = serializers.CharField(help_text="password of user") - - -class LoginSuccessBody(serializers.Serializer): - token = serializers.CharField(help_text="access token") - user = UserInfoSerializer() - - -class TokenVerifyRequest(serializers.Serializer): - token = serializers.CharField(help_text="access token") diff --git a/src/api-engine/api/routes/general/views.py b/src/api-engine/api/routes/general/views.py deleted file mode 100644 index 8470e3fed..000000000 --- a/src/api-engine/api/routes/general/views.py +++ /dev/null @@ -1,176 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# - -from .serializers import ( - LoginBody, - LoginSuccessBody, - TokenVerifyRequest, -) -from api.config import CELLO_HOME -from api.common import ok, err -from api.utils import zip_dir -from api.lib.pki import CryptoGen, CryptoConfig - -from api.routes.general.serializers import ( - RegisterBody, - RegisterResponse, -) -from api.models import UserProfile, Organization -from rest_framework.response import Response -from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned -import logging -import base64 - -from django.contrib.auth import authenticate -from rest_framework import viewsets, status -from rest_framework_simplejwt.views import ( - TokenObtainPairView, - TokenVerifyView, -) -from rest_framework_simplejwt.tokens import ( - RefreshToken, - AccessToken, -) -from rest_framework_simplejwt.exceptions import TokenError - -LOG = logging.getLogger(__name__) - - -class 
RegisterViewSet(viewsets.ViewSet): - def create(self, request): - try: - serializer = RegisterBody(data=request.data) - if serializer.is_valid(raise_exception=True): - email = serializer.validated_data.get("email") - orgname = serializer.validated_data.get("orgName") - password = serializer.validated_data.get("password") - - try: - UserProfile.objects.get(email=email) - except ObjectDoesNotExist: - pass - except MultipleObjectsReturned: - return Response( - err("Email Aleady exists!"), - status=status.HTTP_409_CONFLICT, - ) - else: - return Response( - err("Email Aleady exists!"), - status=status.HTTP_409_CONFLICT, - ) - - try: - Organization.objects.get(name=orgname) - except ObjectDoesNotExist: - pass - except MultipleObjectsReturned: - return Response( - err("Orgnization already exists!"), - status=status.HTTP_409_CONFLICT, - ) - else: - return Response( - err("Orgnization already exists!"), - status=status.HTTP_409_CONFLICT, - ) - - CryptoConfig(orgname).create(0, 0) - CryptoGen(orgname).generate() - - organization = Organization(name=orgname) - organization.save() - - user = UserProfile( - username=email, - email=email, - role="admin", - organization=organization, - ) - user.set_password(password) - user.save() - - response = RegisterResponse(data={"id": organization.id}) - if response.is_valid(raise_exception=True): - return Response( - data=ok(response.validated_data), - status=status.HTTP_200_OK, - ) - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - def _conversion_msp_tls(self, name): - """ - msp and tls from zip file to byte - - :param name: organization name - :return: msp, tls - :rtype: bytes - """ - try: - dir_org = "{}/{}/crypto-config/peerOrganizations/{}/".format( - CELLO_HOME, name, name - ) - - zip_dir("{}msp".format(dir_org), "{}msp.zip".format(dir_org)) - with open("{}msp.zip".format(dir_org), "rb") as f_msp: - msp = base64.b64encode(f_msp.read()) - - zip_dir("{}tlsca".format(dir_org), 
"{}tls.zip".format(dir_org)) - with open("{}tls.zip".format(dir_org), "rb") as f_tls: - tls = base64.b64encode(f_tls.read()) - except Exception as e: - LOG.exception("Conversion Failed") - raise e - - return msp, tls - - -class CelloTokenObtainPairView(TokenObtainPairView): - def post(self, request, *args, **kwargs): - serializer = LoginBody(data=request.data) - if serializer.is_valid(raise_exception=True): - user = authenticate( - request, - username=serializer.validated_data["email"], - password=serializer.validated_data["password"], - ) - if user is not None: - refresh = RefreshToken.for_user(user) - data = { - "token": str(refresh.access_token), - "user": user, - } - response = LoginSuccessBody(instance=data) - return Response( - data=ok(response.data), - status=200, - ) - return super().post(request, *args, **kwargs) - - -class CelloTokenVerifyView(TokenVerifyView): - def post(self, request, *args, **kwargs): - serializer = TokenVerifyRequest(data=request.data) - if serializer.is_valid(raise_exception=True): - try: - access_token = AccessToken( - token=serializer.validated_data["token"], - ) - user = UserProfile.objects.get(pk=access_token["user_id"]) - if user is not None: - data = { - "token": str(access_token.token), - "user": user, - } - response = LoginSuccessBody(instance=data) - return Response( - data=ok(response.data), - status=200, - ) - except TokenError: - LOG.exception("invalid token error") - return Response(data=err(msg="invalid token"), status=401) - - return super().post(request, *args, **kwargs) diff --git a/src/api-engine/api/routes/network/__init__.py b/src/api-engine/api/routes/network/__init__.py deleted file mode 100644 index 0480730a5..000000000 --- a/src/api-engine/api/routes/network/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# diff --git a/src/api-engine/api/routes/network/serializers.py b/src/api-engine/api/routes/network/serializers.py deleted file mode 100644 index 0d95a4594..000000000 --- 
a/src/api-engine/api/routes/network/serializers.py +++ /dev/null @@ -1,125 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from rest_framework import serializers -from api.common.enums import ( - NetworkStatus, - NetworkOperation, - ChannelType, - FabricNodeType, -) -from api.models import Network - - -CHANNEL_NAME_MIN_LEN = 4 -CHANNEL_NAME_MAX_LEN = 36 - - -class NetworkQuery(serializers.Serializer): - page = serializers.IntegerField( - required=False, help_text="Page of filter", default=1, min_value=1 - ) - per_page = serializers.IntegerField( - required=False, - help_text="Per Page of filter", - default=10, - max_value=100, - ) - status = serializers.ChoiceField( - required=False, - help_text=NetworkStatus.get_info("Network Status:", list_str=True), - choices=NetworkStatus.to_choices(True), - ) - - class Meta: - model = Network - fields = ("page", "per_page", "name") - extra_kwargs = {"name": {"required": False}} - - -class NetworkIDSerializer(serializers.Serializer): - id = serializers.CharField(help_text="Network ID") - - -class NetworkResponse(NetworkIDSerializer): - id = serializers.UUIDField(help_text="ID of Network") - name = serializers.CharField(help_text="Name of Network") - created_at = serializers.DateTimeField(help_text="Network create time") - - class Meta: - model = Network - fields = ("id", "name", "created_at") - extra_kwargs = { - "name": {"required": True}, - "created_at": {"required": True, "read_only": False}, - "id": {"required": True, "read_only": False}, - } - - -class NetworkMemberSerializer(serializers.Serializer): - id = serializers.CharField(help_text="Network member id") - type = serializers.ChoiceField( - help_text=FabricNodeType.get_info("Node Types:", list_str=True), - choices=FabricNodeType.to_choices(True), - ) - url = serializers.CharField(help_text="URL of member") - - -class NetworkCreateBody(serializers.ModelSerializer): - - class Meta: - model = Network - fields = ("name", "consensus", "database") - extra_kwargs = { - 
"name": {"required": True}, - "consensus": {"required": True}, - "database": {"required": True}, - } - - -class NetworkMemberResponse(serializers.Serializer): - data = NetworkMemberSerializer(many=True) - - -class NetworkListResponse(serializers.Serializer): - total = serializers.IntegerField(help_text="Total number of networks") - data = NetworkResponse(many=True) - - -class NodesSerializer(serializers.ListField): - def __init__(self, *args, **kwargs): - super(NodesSerializer, self).__init__(*args, **kwargs) - self.help_text = "Nodes ID values" - - child = serializers.CharField( - help_text="Node ID value", min_length=1, max_length=64 - ) - - -class NetworkOperationBody(serializers.Serializer): - action = serializers.ChoiceField( - help_text=NetworkOperation.get_info( - "Network Operations:", list_str=True - ), - choices=NetworkOperation.to_choices(True), - ) - nodes = NodesSerializer() - - -class ChannelBody(serializers.Serializer): - name = serializers.CharField( - help_text="Channel Name", - min_length=CHANNEL_NAME_MIN_LEN, - max_length=CHANNEL_NAME_MAX_LEN, - ) - - -class ChannelCreateBody(ChannelBody): - type = serializers.ChoiceField( - help_text=ChannelType.get_info("Channel Types:", list_str=True), - choices=ChannelType.to_choices(True), - ) - - -class ChannelID(serializers.Serializer): - id = serializers.CharField(help_text="Channel ID") diff --git a/src/api-engine/api/routes/network/views.py b/src/api-engine/api/routes/network/views.py deleted file mode 100644 index a86b447a0..000000000 --- a/src/api-engine/api/routes/network/views.py +++ /dev/null @@ -1,299 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import logging -import base64 -import shutil -import os - -from rest_framework import viewsets, status -from rest_framework.decorators import action -from rest_framework.response import Response -from rest_framework.permissions import IsAuthenticated -from drf_yasg.utils import swagger_auto_schema -from django.core.paginator import Paginator -from 
django.core.exceptions import ObjectDoesNotExist -from api.exceptions import ResourceNotFound, ResourceExists -from api.routes.network.serializers import ( - NetworkQuery, - NetworkListResponse, - NetworkMemberResponse, - NetworkCreateBody, - NetworkIDSerializer, -) -from api.utils.common import with_common_response -from api.lib.configtxgen import ConfigTX, ConfigTxGen -from api.models import Network, Node, Port -from api.config import CELLO_HOME -from api.utils import zip_file -from api.lib.agent import AgentHandler -from api.common import ok, err -import threading - -LOG = logging.getLogger(__name__) - - -class NetworkViewSet(viewsets.ViewSet): - permission_classes = [ - IsAuthenticated, - ] - - def _genesis2base64(self, network): - """ - convert genesis.block to Base64 - :param network: network id - :return: genesis block - :rtype: bytearray - """ - try: - dir_node = "{}/{}/".format(CELLO_HOME, network) - name = "genesis.block" - zname = "block.zip" - zip_file( - "{}{}".format(dir_node, name), "{}{}".format(dir_node, zname) - ) - with open("{}{}".format(dir_node, zname), "rb") as f_block: - block = base64.b64encode(f_block.read()) - return block - except Exception as e: - LOG.exception("Genesis to Base64 Failed") - raise e - - @swagger_auto_schema( - query_serializer=NetworkQuery, - responses=with_common_response( - with_common_response({status.HTTP_200_OK: NetworkListResponse}) - ), - ) - def list(self, request): - """ - List network - :param request: query parameter - :return: network list - :rtype: list - """ - try: - serializer = NetworkQuery(data=request.GET) - if serializer.is_valid(raise_exception=True): - page = serializer.validated_data.get("page", 1) - per_page = serializer.validated_data.get("page", 10) - org = request.user.organization - networks = org.network - if not networks: - return Response( - ok(data={"total": 0, "data": None}), - status=status.HTTP_200_OK, - ) - p = Paginator([networks], per_page) - networks = p.page(page) - networks = [ - { 
- "id": network.id, - "name": network.name, - "created_at": network.created_at, - } - for network in networks - ] - response = NetworkListResponse( - data={"total": p.count, "data": networks} - ) - if response.is_valid(raise_exception=True): - return Response( - ok(response.validated_data), status=status.HTTP_200_OK - ) - return Response( - ok(data={"total": 0, "data": None}), status=status.HTTP_200_OK - ) - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - def _agent_params(self, pk): - """ - get node's params from db - :param node: node id - :return: info - """ - try: - node = Node.objects.get(id=pk) - org = node.organization - if org is None: - raise ResourceNotFound(detail="Organization Not Found") - network = org.network - if network is None: - raise ResourceNotFound(detail="Network Not Found") - agent = org.agent.get() - if agent is None: - raise ResourceNotFound(detail="Agent Not Found") - ports = Port.objects.filter(node=node) - if ports is None: - raise ResourceNotFound(detail="Port Not Found") - - info = {} - - org_name = ( - org.name if node.type == "peer" else org.name.split(".", 1)[1] - ) - # get info of node, e.g, tls, msp, config. 
- info["status"] = node.status - info["msp"] = node.msp - info["tls"] = node.tls - info["config_file"] = node.config_file - info["type"] = node.type - info["name"] = "{}.{}".format(node.name, org_name) - info["urls"] = agent.urls - info["network_type"] = network.type - info["agent_type"] = agent.type - info["ports"] = ports - return info - except Exception as e: - LOG.exception("Could Not Get Params") - raise e - - def _start_node(self, pk): - """ - start node from agent - :param node: node id - :return: null - """ - try: - node_qs = Node.objects.filter(id=pk) - infos = self._agent_params(pk) - agent = AgentHandler(infos) - cid = agent.create(infos) - if cid: - node_qs.update(cid=cid, status="running") - else: - raise ResourceNotFound(detail="Container Not Built") - except Exception as e: - LOG.exception("Node Not Started") - raise e - - @swagger_auto_schema( - request_body=NetworkCreateBody, - responses=with_common_response( - {status.HTTP_201_CREATED: NetworkIDSerializer} - ), - ) - def create(self, request): - """ - Create Network - :param request: create parameter - :return: organization ID - :rtype: uuid - """ - try: - serializer = NetworkCreateBody(data=request.data) - if serializer.is_valid(raise_exception=True): - name = serializer.validated_data.get("name") - consensus = serializer.validated_data.get("consensus") - database = serializer.validated_data.get("database") - - try: - if Network.objects.get(name=name): - raise ResourceExists(detail="Network exists") - except ObjectDoesNotExist: - pass - org = request.user.organization - if org.network: - raise ResourceExists( - detail="Network exists for the organization" - ) - - network = Network( - name=name, consensus=consensus, database=database - ) - network.save() - org.network = network - org.save() - nodes = Node.objects.filter(organization=org) - for node in nodes: - try: - threading.Thread( - target=self._start_node, args=(node.id,) - ).start() - except Exception as e: - LOG.exception("Network Not 
Created") - raise e - - response = NetworkIDSerializer(data=network.__dict__) - if response.is_valid(raise_exception=True): - return Response( - ok(response.validated_data), - status=status.HTTP_201_CREATED, - ) - except ResourceExists as e: - LOG.exception("Network Exists") - raise e - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - @swagger_auto_schema(responses=with_common_response()) - def retrieve(self, request, pk=None): - """ - Get Network - Get network information - """ - pass - - @swagger_auto_schema( - responses=with_common_response( - {status.HTTP_202_ACCEPTED: "No Content"} - ) - ) - def destroy(self, request, pk=None): - """ - Delete Network - :param request: destory parameter - :param pk: primary key - :return: none - :rtype: rest_framework.status - """ - try: - network = Network.objects.get(pk=pk) - path = "{}/{}".format(CELLO_HOME, network.name) - if os.path.exists(path): - shutil.rmtree(path, True) - network.delete() - return Response(ok(None), status=status.HTTP_202_ACCEPTED) - - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - @swagger_auto_schema( - methods=["get"], - responses=with_common_response( - {status.HTTP_200_OK: NetworkMemberResponse} - ), - ) - @swagger_auto_schema( - methods=["post"], - responses=with_common_response( - {status.HTTP_200_OK: NetworkMemberResponse} - ), - ) - @action(methods=["get", "post"], detail=True, url_path="peers") - def peers(self, request, pk=None): - """ - get: - Get Peers - Get peers of network. 
- post: - Add New Peer - Add peer into network - """ - pass - - @swagger_auto_schema( - methods=["delete"], - responses=with_common_response( - {status.HTTP_200_OK: NetworkMemberResponse} - ), - ) - @action(methods=["delete"], detail=True, url_path="peers/") - def delete_peer(self, request, pk=None, peer_id=None): - """ - delete: - Delete Peer - Delete peer in network - """ - pass diff --git a/src/api-engine/api/routes/node/__init__.py b/src/api-engine/api/routes/node/__init__.py deleted file mode 100644 index 0480730a5..000000000 --- a/src/api-engine/api/routes/node/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# diff --git a/src/api-engine/api/routes/node/serializers.py b/src/api-engine/api/routes/node/serializers.py deleted file mode 100644 index 067699dab..000000000 --- a/src/api-engine/api/routes/node/serializers.py +++ /dev/null @@ -1,397 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import logging - -from rest_framework import serializers -from api.common.enums import ( - Operation, -) -from api.common.serializers import PageQuerySerializer -from api.models import ( - Node, - Port, - FabricCA, - NodeUser, - FabricPeer, - PeerCa, - PeerCaUser, -) - -LOG = logging.getLogger(__name__) - - -class PortSerializer(serializers.ModelSerializer): - class Meta: - model = Port - fields = ("external", "internal") - extra_kwargs = { - "external": {"required": True}, - "internal": {"required": True}, - } - - -class NodeQuery(PageQuerySerializer, serializers.ModelSerializer): - agent_id = serializers.UUIDField( - help_text="Agent ID, only operator can use this field", - required=False, - allow_null=True, - ) - - class Meta: - model = Node - fields = ( - "page", - "per_page", - "type", - "name", - "agent_id", - ) - extra_kwargs = {"type": {"required": False}} - - -class NodeIDSerializer(serializers.Serializer): - id = serializers.UUIDField(help_text="ID of node") - - -class NodeCIDSerializer(serializers.Serializer): - id = 
serializers.CharField(help_text="containter ID of node") - - -class FabricCASerializer(serializers.ModelSerializer): - hosts = serializers.ListField( - help_text="Hosts for ca support", - child=serializers.CharField(help_text="Host name", max_length=64), - required=False, - allow_empty=True, - ) - - class Meta: - model = FabricCA - fields = ("admin_name", "admin_password", "hosts", "type") - - -class PeerCaUserSerializer(serializers.ModelSerializer): - class Meta: - model = PeerCaUser - fields = ("user", "username", "password", "type") - - def validate(self, attrs): - user = attrs.get("user") - username = attrs.get("username") - password = attrs.get("password") - user_type = attrs.get("type") - - if user is None and ( - username is None or password is None or user_type is None - ): - raise serializers.ValidationError( - "Input user or username,password,type" - ) - - if user is not None and ( - username is not None - or password is not None - or user_type is not None - ): - raise serializers.ValidationError( - "Input user or username,password,type" - ) - - return attrs - - -class PeerCaSerializer(serializers.ModelSerializer): - users = PeerCaUserSerializer( - help_text="Users of ca node, " - "can only set user or set username,password,type together", - many=True, - ) - - class Meta: - model = PeerCa - fields = ("node", "address", "certificate", "type", "users") - - def validate(self, attrs): - node = attrs.get("node") - address = attrs.get("address") - certificate = attrs.get("certificate") - ca_type = attrs.get("type") - - # check ether set node or set address,certificate,type together - if ( - node is None - and (address is None or certificate is None or ca_type is None) - ) or ( - node is not None - and ( - address is not None - or certificate is not None - or ca_type is not None - ) - ): - raise serializers.ValidationError( - "Input node or address,certificate" - ) - - return attrs - - -class FabricPeerSerializer(serializers.ModelSerializer): - ca_nodes = 
PeerCaSerializer( - help_text="CA nodes for peer node, " - "can only set node or set address,certificate,type together", - many=True, - ) - - class Meta: - model = FabricPeer - fields = ( - "name", - "gossip_use_leader_reflection", - "gossip_org_leader", - "gossip_skip_handshake", - "local_msp_id", - "ca_nodes", - ) - extra_kwargs = { - "name": {"required": True}, - "local_msp_id": {"required": True}, - "ca_nodes": {"required": True}, - "gossip_use_leader_reflection": {"default": True}, - "gossip_skip_handshake": {"default": True}, - "gossip_org_leader": {"default": False}, - } - - -class NodeInListSerializer(NodeIDSerializer, serializers.ModelSerializer): - # agent_id = serializers.UUIDField( - # help_text="Agent ID", required=False, allow_null=True - # ) - ports = PortSerializer( - help_text="Port mapping for node", many=True, required=False - ) - network_id = serializers.UUIDField( - help_text="Network ID", required=False, allow_null=True - ) - - class Meta: - model = Node - fields = ( - "id", - "type", - "name", - "urls", - "created_at", - "status", - "network_id", - "organization", - "cid", - "ports", - ) - extra_kwargs = { - "id": {"required": True, "read_only": False}, - "created_at": {"required": True, "read_only": False}, - # "ca": {"required": False, "allow_null": True}, - } - - -class NodeListSerializer(serializers.Serializer): - data = NodeInListSerializer(many=True, help_text="Nodes list") - total = serializers.IntegerField( - help_text="Total number of node", min_value=0 - ) - - -class NodeUrlSerializer(serializers.Serializer): - internal_port = serializers.IntegerField( - min_value=1, - max_value=65535, - required=True, - help_text="Port number of node service", - ) - url = serializers.CharField(help_text="Url of node service", required=True) - - -class NodeInfoSerializer(NodeIDSerializer, serializers.ModelSerializer): - # ca = FabricCASerializer( - # help_text="CA configuration for node", required=False, allow_null=True - # ) - # file = 
serializers.URLField(help_text="File url of node", required=False) - # links = NodeUrlSerializer(help_text="Links of node service", many=True) - agent_id = serializers.UUIDField( - help_text="Agent ID", required=False, allow_null=True - ) - - class Meta: - model = Node - fields = ( - "id", - "type", - "name", - # "network_type", - # "network_version", - "created_at", - "agent_id", - # "network_id", - "status", - # "ca", - # "file", - # "links", - ) - extra_kwargs = { - "id": {"required": True, "read_only": False}, - "created_at": {"required": True, "read_only": False}, - } - - -class NodeStatusSerializer(NodeIDSerializer, serializers.ModelSerializer): - class Meta: - model = Node - fields = ("status",) - extra_kwargs = { - "id": {"required": True, "read_only": False}, - "created_at": {"required": True, "read_only": False}, - } - - -class NodeCreateBody(serializers.ModelSerializer): - class Meta: - model = Node - fields = ( - "name", - "type", - ) - extra_kwargs = { - "name": {"required": True}, - "type": {"required": True}, - } - - def validate(self, attrs): - # network_type = attrs.get("network_type") - # node_type = attrs.get("type") - # network_version = attrs.get("network_version") - # agent_type = attrs.get("agent_type") - # agent = attrs.get("agent") - # ca = attrs.get("ca") - # peer = attrs.get("peer") - # if network_type == NetworkType.Fabric.value: - # if network_version not in FabricVersions.values(): - # raise serializers.ValidationError("Not valid fabric version") - # if node_type not in FabricNodeType.names(): - # raise serializers.ValidationError( - # "Not valid node type for %s" % network_type - # ) - # if node_type == FabricNodeType.Ca.name.lower() and ca is None: - # raise serializers.ValidationError( - # "Please input ca configuration for ca node" - # ) - # elif ( - # node_type == FabricNodeType.Peer.name.lower() and peer is None - # ): - # raise serializers.ValidationError( - # "Please input peer configuration for peer node" - # ) - # - # if 
agent_type is None and agent is None: - # raise serializers.ValidationError("Please set agent_type or agent") - # - # if agent_type and agent: - # if agent_type != agent.type: - # raise serializers.ValidationError( - # "agent type not equal to agent" - # ) - - return attrs - - -class NodeUpdateBody(serializers.ModelSerializer): - ports = PortSerializer( - help_text="Port mapping for node", many=True, required=False - ) - - class Meta: - model = Node - fields = ("status", "ports") - - -class NodeOperationSerializer(serializers.Serializer): - action = serializers.ChoiceField( - help_text=Operation.get_info("Operation for node:", list_str=True), - choices=Operation.to_choices(True), - ) - - -class NodeConfigFileSerializer(serializers.ModelSerializer): - files = serializers.FileField() - - class Meta: - model = Node - fields = ("files",) - - -# class NodeFileCreateSerializer(serializers.ModelSerializer): -# def to_form_paras(self): -# custom_paras = to_form_paras(self) - -# return custom_paras - -# class Meta: -# model = Node -# fields = ("file",) -# extra_kwargs = { -# "file": { -# "required": True, -# "validators": [ -# FileExtensionValidator( -# allowed_extensions=["tar.gz", "tgz"] -# ), -# validate_file, -# ], -# } -# } - - -class NodeUserCreateSerializer(serializers.ModelSerializer): - class Meta: - model = NodeUser - fields = ("name", "user_type", "secret", "attrs") - extra_kwargs = { - "name": {"required": True}, - "user_type": {"required": True}, - "secret": {"required": True}, - } - - -class NodeUserQuerySerializer( - PageQuerySerializer, serializers.ModelSerializer -): - class Meta: - model = NodeUser - fields = ("name", "user_type", "page", "per_page", "status") - - -class UserInListSerializer(serializers.ModelSerializer): - class Meta: - model = NodeUser - fields = ("id", "name", "user_type", "status") - - -class NodeUserListSerializer(serializers.Serializer): - data = UserInListSerializer(many=True, help_text="Users list") - total = 
serializers.IntegerField( - help_text="Total number of node", min_value=0 - ) - - -class NodeUserIDSerializer(serializers.ModelSerializer): - class Meta: - model = NodeUser - fields = ("id",) - - -class NodeUserPatchSerializer(serializers.ModelSerializer): - class Meta: - model = NodeUser - fields = ("status",) - extra_kwargs = {"status": {"required": True}} diff --git a/src/api-engine/api/routes/node/views.py b/src/api-engine/api/routes/node/views.py deleted file mode 100644 index e51e17ae9..000000000 --- a/src/api-engine/api/routes/node/views.py +++ /dev/null @@ -1,1061 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import logging -import base64 -import shutil -import os -import threading -import yaml - -from django.core.exceptions import ObjectDoesNotExist -from django.core.paginator import Paginator -from django.http import HttpResponse -from drf_yasg.utils import swagger_auto_schema -from rest_framework import viewsets, status -from rest_framework.decorators import action -from rest_framework.response import Response -from rest_framework.parsers import MultiPartParser, FormParser, JSONParser -from rest_framework.permissions import IsAuthenticated - -from api.common.enums import AgentOperation -from api.exceptions import ( - CustomError, - NoResource, - ResourceExists, - ResourceInUse, -) -from api.exceptions import ResourceNotFound -from api.models import ( - Node, - Port, - FabricCA, - FabricCAServerType, - NodeUser, - FabricPeer, - PeerCa, - PeerCaUser, -) -from api.routes.node.serializers import ( - NodeOperationSerializer, - NodeQuery, - NodeCreateBody, - NodeIDSerializer, - NodeListSerializer, - NodeUpdateBody, - # NodeFileCreateSerializer, - # NodeInfoSerializer, - NodeStatusSerializer, - NodeUserCreateSerializer, - NodeUserIDSerializer, - NodeUserPatchSerializer, - NodeUserQuerySerializer, - NodeUserListSerializer, - NodeConfigFileSerializer, -) -from api.tasks import operate_node -from api.utils.common import with_common_response -from 
api.lib.pki import CryptoGen, CryptoConfig -from api.utils import zip_dir, zip_file -from api.config import CELLO_HOME, FABRIC_NODE, PRODUCTION_NODE -from api.utils.node_config import NodeConfig -from api.lib.agent import AgentHandler -from api.lib.peer.channel import Channel as PeerChannel -from api.utils.port_picker import set_ports_mapping, find_available_ports -from api.common import ok, err -from api.routes.channel.views import init_env_vars - -LOG = logging.getLogger(__name__) - - -class NodeViewSet(viewsets.ViewSet): - permission_classes = [ - IsAuthenticated, - ] - parser_classes = [MultiPartParser, FormParser, JSONParser] - - # Only operator can update node info - # def get_permissions(self): - # if self.action in ["update"]: - # permission_classes = (IsAuthenticated, IsOperatorAuthenticated) - # else: - # permission_classes = (IsAuthenticated,) - # - # return [permission() for permission in permission_classes] - - @staticmethod - def _validate_organization(request): - if request.user.organization is None: - raise CustomError(detail="Need join in organization.") - - @swagger_auto_schema( - query_serializer=NodeQuery, - responses=with_common_response( - with_common_response({status.HTTP_200_OK: NodeListSerializer}) - ), - ) - def list(self, request, *args, **kwargs): - """ - List node - - :param request: query parameter - :return: node list - :rtype: list - """ - try: - serializer = NodeQuery(data=request.GET) - if serializer.is_valid(raise_exception=True): - page = serializer.validated_data.get("page") - per_page = serializer.validated_data.get("per_page") - node_type = serializer.validated_data.get("type") - name = serializer.validated_data.get("name") - agent_id = serializer.validated_data.get("agent_id") - - # if agent_id is not None and not request.user.is_operator: - # raise PermissionDenied - query_filter = {} - - if node_type: - query_filter.update({"type": node_type}) - if name: - query_filter.update({"name__icontains": name}) - if 
request.user.is_admin: - query_filter.update( - {"organization": request.user.organization} - ) - # elif request.user.is_common_user: - # query_filter.update({"user": request.user}) - if agent_id: - query_filter.update({"agent__id": agent_id}) - nodes = Node.objects.filter(**query_filter) - p = Paginator(nodes, per_page) - nodes = p.page(page) - response = NodeListSerializer( - {"total": p.count, "data": nodes} - ) - return Response( - data=ok(response.data), status=status.HTTP_200_OK - ) - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - def _save_fabric_ca(self, request, ca=None): - if ca is None: - return None - - ca_body = {} - admin_name = ca.get("admin_name") - admin_password = ca.get("admin_password") - # If found tls type ca server under this organization, - # will cause resource exists error - ca_server_type = ca.get("type", FabricCAServerType.Signature.value) - if ca_server_type == FabricCAServerType.TLS.value: - exist_ca_server = Node.objects.filter( - organization=request.user.organization, - ca__type=FabricCAServerType.TLS.value, - ).count() - if exist_ca_server > 0: - raise ResourceExists("CA Exists") - hosts = ca.get("hosts", []) - if admin_name: - ca_body.update({"admin_name": admin_name}) - if admin_password: - ca_body.update({"admin_password": admin_password}) - fabric_ca = FabricCA(**ca_body, hosts=hosts, type=ca_server_type) - fabric_ca.save() - - return fabric_ca - - def _save_fabric_peer(self, request, peer=None): - if peer is None: - return None - name = peer.get("name") - gossip_use_leader_reflection = peer.get("gossip_use_leader_reflection") - gossip_org_leader = peer.get("gossip_org_leader") - gossip_skip_handshake = peer.get("gossip_skip_handshake") - local_msp_id = peer.get("local_msp_id") - ca_nodes = peer.get("ca_nodes") - - body = {"name": name, "local_msp_id": local_msp_id} - if gossip_use_leader_reflection is not None: - body.update( - {"gossip_use_leader_reflection": 
gossip_use_leader_reflection} - ) - if gossip_org_leader is not None: - body.update({"gossip_org_leader": gossip_org_leader}) - if gossip_skip_handshake is not None: - body.update({"gossip_skip_handshake": gossip_skip_handshake}) - - fabric_peer = FabricPeer(**body) - fabric_peer.save() - - ca_nodes_list = [] - for ca_node in ca_nodes: - node = ca_node.get("node") - address = ca_node.get("address") - certificate = ca_node.get("certificate") - ca_type = ca_node.get("type") - - ca_body = {"peer": fabric_peer} - ca_node_dict = {} - if node is not None: - ca_body.update({"node": node}) - port = Port.objects.filter(node=node, internal=7054).first() - if port: - ca_node_dict.update( - {"address": "%s:%s" % (node.agent.ip, port.external)} - ) - ca_node_dict.update( - { - "type": node.ca.type, - "certificate": request.build_absolute_uri( - node.file.url - ), - } - ) - else: - update_body = { - "address": address, - "certificate": certificate, - "type": ca_type, - } - ca_body.update(update_body) - ca_node_dict.update(update_body) - - peer_ca = PeerCa(**ca_body) - peer_ca.save() - users = ca_node.get("users") - - user_list = [] - for ca_user in users: - ca_user_body = {"peer_ca": peer_ca} - user_dict = {} - user = ca_user.get("user") - username = ca_user.get("username") - password = ca_user.get("password") - user_type = ca_user.get("type") - - if user is not None: - ca_user_body.update({"user": user}) - user_dict.update( - { - "username": user.name, - "password": user.secret, - "type": user.user_type, - } - ) - else: - update_body = { - "username": username, - "password": password, - "type": user_type, - } - ca_user_body.update(update_body) - user_dict.update(update_body) - user_list.append(user_dict) - - ca_user_obj = PeerCaUser(**ca_user_body) - ca_user_obj.save() - - ca_node_dict.update({"users": user_list}) - - ca_nodes_list.append(ca_node_dict) - - return fabric_peer, ca_nodes_list - - @swagger_auto_schema( - request_body=NodeCreateBody, - 
responses=with_common_response( - {status.HTTP_201_CREATED: NodeIDSerializer} - ), - ) - def create(self, request): - """ - Create Node - - :param request: create parameter - :return: node ID - :rtype: uuid - """ - try: - serializer = NodeCreateBody(data=request.data) - if serializer.is_valid(raise_exception=True): - node_name = serializer.validated_data.get("name") - node_type = serializer.validated_data.get("type") - organization = request.user.organization - - agent = organization.agent.get() - if agent: - nodes = Node.objects.filter( - name=node_name, - organization=organization, - type=node_type, - ) - if nodes: - raise ResourceExists("Node Exists") - else: - raise NoResource("Node Does Not Exist") - urls = "{}.{}".format(node_name, organization.name) - nodes = {"type": node_type, "Specs": [node_name]} - CryptoConfig(organization.name).update(nodes) - CryptoGen(organization.name).extend() - self._generate_config(node_type, organization.name, node_name) - msp, tls, cfg = self._conversion_msp_tls_cfg( - node_type, organization.name, node_name - ) - node = Node( - name=node_name, - organization=organization, - urls=urls, - type=node_type, - msp=msp, - tls=tls, - agent=agent, - config_file=cfg, - ) - node.save() - - self._set_port(node_type, node, agent) - if node.organization.network: - try: - threading.Thread( - target=self._start_node, args=(node.id,) - ).start() - except Exception as e: - LOG.exception("Thread Failed") - raise e - - response = NodeIDSerializer(data=node.__dict__) - if response.is_valid(raise_exception=True): - return Response( - ok(response.validated_data), - status=status.HTTP_201_CREATED, - ) - except (ResourceExists, NoResource) as e: - raise e - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - def _set_port(self, type, node, agent): - """ - get free port from agent, - - :param type: node type - :param node: node obj - :param agent: agent obj - :return: none - :rtype: none - """ - ip = 
agent.urls.split(":")[1].strip("//") - - if type == "peer": - ports = find_available_ports(ip, node.id, agent.id, 2) - set_ports_mapping( - node.id, - [ - {"internal": 7051, "external": ports[0]}, - {"internal": 9444, "external": ports[1]}, - ], - True, - ) - else: - # unify the port mapping for orderer - ports = find_available_ports(ip, node.id, agent.id, 3) - set_ports_mapping( - node.id, - [ - {"internal": 7050, "external": ports[0]}, - {"internal": 7053, "external": ports[1]}, - {"internal": 9443, "external": ports[2]}, - ], - True, - ) - - def _conversion_msp_tls_cfg(self, type, org, node): - """ - msp and tls , cfg from zip file to byte - - :param org: organization name - :param type: node type - :param node: node name - :return: msp, tls, cfg - :rtype: bytes - """ - try: - if type == "peer": - dir_node = "{}/{}/crypto-config/peerOrganizations/{}/peers/{}/".format( - CELLO_HOME, org, org, node + "." + org - ) - name = "core.yaml" - cname = "peer_config.zip" - else: - dir_node = "{}/{}/crypto-config/ordererOrganizations/{}/orderers/{}/".format( - CELLO_HOME, - org, - org.split(".", 1)[1], - node + "." 
+ org.split(".", 1)[1], - ) - name = "orderer.yaml" - cname = "orderer_config.zip" - - zip_dir("{}msp".format(dir_node), "{}msp.zip".format(dir_node)) - with open("{}msp.zip".format(dir_node), "rb") as f_msp: - msp = base64.b64encode(f_msp.read()) - - zip_dir("{}tls".format(dir_node), "{}tls.zip".format(dir_node)) - with open("{}tls.zip".format(dir_node), "rb") as f_tls: - tls = base64.b64encode(f_tls.read()) - - zip_file( - "{}{}".format(dir_node, name), "{}{}".format(dir_node, cname) - ) - with open("{}{}".format(dir_node, cname), "rb") as f_cfg: - cfg = base64.b64encode(f_cfg.read()) - except Exception as e: - LOG.exception("Failed Conversion: CFG From Zip To Byte") - raise e - - return msp, tls, cfg - - def _generate_config(self, type, org, node): - """ - generate config for node (core.yaml, orderer.yaml) - - :param org: organization name - :param type: node type - :param node: node name - :param port: node port(todo: automatic distribution port) - :return: none - :rtype: none - """ - args = {} - if type == "peer": - args.update({"peer_tls_enabled": True}) - args.update( - {"operations_listenAddress": node + "." + org + ":9444"} - ) - args.update({"peer_address": node + "." + org + ":7051"}) - args.update({"peer_gossip_bootstrap": node + "." + org + ":7051"}) - args.update( - {"peer_gossip_externalEndpoint": node + "." + org + ":7051"} - ) - args.update({"peer_id": node + "." 
+ org}) - args.update({"peer_localMspId": org.capitalize() + "MSP"}) - args.update({"peer_mspConfigPath": "/etc/hyperledger/fabric/msp"}) - args.update( - { - "peer_tls_cert_file": "/etc/hyperledger/fabric/tls/server.crt" - } - ) - args.update( - {"peer_tls_key_file": "/etc/hyperledger/fabric/tls/server.key"} - ) - args.update( - { - "peer_tls_rootcert_file": "/etc/hyperledger/fabric/tls/ca.crt" - } - ) - args.update({"vm_docker_hostConfig_NetworkMode": "cello_net"}) - args.update({"vm_endpoint": "unix:///host/var/run/docker.sock"}) - - a = NodeConfig(org) - a.peer(node, **args) - else: - args.update({"Admin_TLS_Enabled": True}) - args.update({"Admin_ListenAddress": "0.0.0.0:7053"}) - args.update( - { - "Admin_TLS_Certificate": "/etc/hyperledger/fabric/tls/server.crt" - } - ) - args.update( - { - "Admin_TLS_PrivateKey": "/etc/hyperledger/fabric/tls/server.key" - } - ) - args.update({"ChannelParticipation_Enabled": True}) - args.update( - { - "General_Cluster_ClientCertificate": "/etc/hyperledger/fabric/tls/server.crt" - } - ) - args.update( - { - "General_Cluster_ClientPrivateKey": "/etc/hyperledger/fabric/tls/server.key" - } - ) - args.update({"General_ListenAddress": "0.0.0.0"}) - args.update({"General_ListenPort": 7050}) - args.update({"General_LocalMSPID": "OrdererMSP"}) - args.update({"General_LocalMSPDir": "/etc/hyperledger/fabric/msp"}) - args.update({"General_TLS_Enabled": True}) - args.update( - { - "General_TLS_Certificate": "/etc/hyperledger/fabric/tls/server.crt" - } - ) - args.update( - { - "General_TLS_PrivateKey": "/etc/hyperledger/fabric/tls/server.key" - } - ) - args.update( - {"General_TLS_RootCAs": "[/etc/hyperledger/fabric/tls/ca.crt]"} - ) - args.update({"General_BootstrapMethod": "none"}) - args.update({"Metrics_Provider": "prometheus"}) - args.update( - { - "Operations_ListenAddress": node - + "." 
- + org.split(".", 1)[1] - + ":9443" - } - ) - - a = NodeConfig(org) - a.orderer(node, **args) - pass - - def _agent_params(self, pk): - """ - get node's params from db - :param node: node id - :return: info - """ - try: - node = Node.objects.get(id=pk) - org = node.organization - if org is None: - raise ResourceNotFound("Organization Not Found") - network = org.network - agent = org.agent.get() - if agent is None: - raise ResourceNotFound("Agent Not Found") - ports = Port.objects.filter(node=node) - if ports is None: - raise ResourceNotFound("Port Not Found") - - info = {} - org_name = ( - org.name if node.type == "peer" else org.name.split(".", 1)[1] - ) - # get info of node, e.g, tls, msp, config. - info["status"] = node.status - info["msp"] = node.msp - info["tls"] = node.tls - info["config_file"] = node.config_file - info["type"] = node.type - info["name"] = "{}.{}".format(node.name, org_name) - info["urls"] = agent.urls - info["network_type"] = None if network is None else network.type - info["agent_type"] = agent.type - info["container_name"] = "{}.{}".format(node.name, org_name) - info["ports"] = ports - return info - except Exception as e: - LOG.exception("Failed To Get Node Params") - raise e - - def _start_node(self, pk): - """ - start node from agent - :param node: node id - :return: null - """ - try: - node_qs = Node.objects.filter(id=pk) - infos = self._agent_params(pk) - agent = AgentHandler(infos) - cid = agent.create(infos) - if cid: - node_qs.update(cid=cid, status="running") - else: - raise ResourceNotFound("Node Not Found") - except Exception as e: - LOG.exception("Could Not Start Node") - raise e - - @swagger_auto_schema( - methods=["post"], - request_body=NodeOperationSerializer, - responses=with_common_response({status.HTTP_202_ACCEPTED: "Accepted"}), - ) - @action(methods=["post"], detail=True, url_path="operations") - def operate(self, request, pk=None): - """ - Operate Node - - Do some operation on node, start/stop/restart - """ - try: - 
serializer = NodeOperationSerializer(data=request.data) - if serializer.is_valid(raise_exception=True): - action = serializer.validated_data.get("action") - infos = self._agent_params(pk) - agent = AgentHandler(infos) - node_qs = Node.objects.filter(id=pk) - node_status = infos.get("status") - - if action == "start" and node_status == "paused": - node_qs.update(status="restarting") - res = True if agent.start() else False - if res: - node_qs.update(status="running") - return Response( - ok({"restart": res}), status=status.HTTP_201_CREATED - ) - elif action == "stop" and node_status == "running": - res = True if agent.stop() else False - if res: - node_qs.update(status="paused") - return Response( - ok({"stop": res}), status=status.HTTP_201_CREATED - ) - else: - return Response( - ok({"error": "invalid operation"}), - status=status.HTTP_201_CREATED, - ) - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - @swagger_auto_schema( - responses=with_common_response( - {status.HTTP_204_NO_CONTENT: "No Content"} - ) - ) - def destroy(self, request, pk=None): - """ - Delete Node - - :param request: destory parameter - :param pk: primary key - :return: none - :rtype: rest_framework.status - """ - try: - node = Node.objects.get(id=pk) - infos = self._agent_params(pk) - agent = AgentHandler(infos) - agent_exist = agent.get() - node.status = "removing" - node.save() - if node.type == "orderer" and node.organization.network is not None: - orderer_cnt = Node.objects.filter( - type="orderer", organization__network=node.organization.network).count() - if orderer_cnt == 1: - raise ResourceInUse("Orderer In Use") - res = False - # if agent not exist or no continer is created for node, do not try to stop/delete container - if not agent_exist or not node.cid: - res = True - else: - # try to stop/delete container 3 times - # TODO: optimize the retry logic - for i in range(3): - LOG.info( - "Retry to stop/delete container %d time(s).", i + 1) - 
try: - response = agent.stop() - if response is not True: - LOG.error( - "Failed when agent stops/deletes container: %s", response) - continue - response = agent.delete() - if response is not True: - LOG.error( - "Failed when agent stops/deletes container: %s", response) - continue - res = True - except Exception as e: - LOG.error( - "Exception when agent stops/deletes container: %s", e) - continue - break - if res: - fabric_path = "{}/{}".format(FABRIC_NODE, infos["container_name"]) - if os.path.exists(fabric_path): - shutil.rmtree(fabric_path, True) - prod_path = "{}/{}".format(PRODUCTION_NODE, infos["container_name"]) - if os.path.exists(prod_path): - shutil.rmtree(prod_path, True) - node.delete() - # node.status = "exited" - # node.save() - else: - return Response(ok({"delete": False}), status=status.HTTP_202_ACCEPTED) - return Response(ok({"delete": True}), status=status.HTTP_202_ACCEPTED) - except ObjectDoesNotExist: - raise ResourceNotFound("Node Not Found") - except (ResourceNotFound, ResourceInUse) as e: - raise e - except Exception as e: - LOG.exception("Node Not Deleted") - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - @swagger_auto_schema( - operation_id="update node", - request_body=NodeUpdateBody, - responses=with_common_response({status.HTTP_202_ACCEPTED: "Accepted"}), - ) - def update(self, request, pk=None): - """ - Update Node - - Update special node with id. 
- """ - try: - serializer = NodeUpdateBody(data=request.data) - if serializer.is_valid(raise_exception=True): - node_status = serializer.validated_data.get("status") - ports = serializer.validated_data.get("ports", []) - try: - node = Node.objects.get(id=pk) - except ObjectDoesNotExist: - raise ResourceNotFound("Node Not Found") - - node.status = node_status - node.save() - - for port_item in ports: - port = Port( - external=port_item.get("external"), - internal=port_item.get("internal"), - node=node, - ) - port.save() - - return Response(status=status.HTTP_202_ACCEPTED) - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - # @swagger_auto_schema( - # methods=["post"], - # request_body=NodeFileCreateSerializer, - # responses=with_common_response({status.HTTP_202_ACCEPTED: "Accepted"}), - # ) - # @action(methods=["post"], detail=True, url_path="files", url_name="files") - # def upload_files(self, request, pk=None): - # """ - # Upload file to node - - # Upload related files to node - # """ - # serializer = NodeFileCreateSerializer(data=request.data) - # if serializer.is_valid(raise_exception=True): - # file = serializer.validated_data.get("file") - # try: - # node = Node.objects.get(id=pk) - # except ObjectDoesNotExist: - # raise ResourceNotFound - # else: - # # delete old file - # if node.file: - # node.file.delete() - # node.file = file - # node.save() - - # return Response(status=status.HTTP_202_ACCEPTED) - - @swagger_auto_schema( - responses=with_common_response( - with_common_response({status.HTTP_200_OK: NodeStatusSerializer}) - ) - ) - def retrieve(self, request, pk=None): - """ - Get Node information - - Get node detail information. 
- """ - try: - self._validate_organization(request) - try: - node = Node.objects.get( - id=pk, organization=request.user.organization - ) - except ObjectDoesNotExist: - raise ResourceNotFound("Node Not Found") - else: - # Set file url of node, we only need node status for now - # if node.file: - # node.file = request.build_absolute_uri(node.file.url) - # ports = Port.objects.filter(node=node) - # node.links = [ - # { - # "internal_port": port.internal, - # "url": "%s:%s" % (node.agent.ip, port.external), - # } - # for port in ports - # ] - response = NodeStatusSerializer(node) - return Response( - ok(data=response.data), status=status.HTTP_200_OK - ) - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - @swagger_auto_schema( - methods=["get"], - responses=with_common_response( - {status.HTTP_200_OK: NodeConfigFileSerializer} - ), - ) - @swagger_auto_schema( - methods=["post"], - request_body=NodeConfigFileSerializer, - responses=with_common_response({status.HTTP_202_ACCEPTED: "Accepted"}), - ) - @action( - methods=["get", "post"], - detail=True, - url_path="config", - url_name="config", - ) - def node_config(self, request, pk=None): - """ - Download/upload the node config file - """ - try: - self._validate_organization(request) - organization = request.user.organization - org = organization.name - try: - node = Node.objects.get(id=pk, organization=organization) - except ObjectDoesNotExist: - raise ResourceNotFound("Node Not Found") - # Get file locations based on node type - if node.type == "peer": - dir_node = "{}/{}/crypto-config/peerOrganizations/{}/peers/{}/".format( - CELLO_HOME, org, org, node.name + "." + org - ) - cname = "peer_config.zip" - name = "core.yaml" - else: - dir_node = "{}/{}/crypto-config/ordererOrganizations/{}/orderers/{}/".format( - CELLO_HOME, - org, - org.split(".", 1)[1], - node.name + "." 
+ org.split(".", 1)[1], - ) - cname = "orderer_config.zip" - name = "orderer.yaml" - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - if request.method == "GET": - # Get the config file from local storage - try: - config_file = open("{}{}".format(dir_node, cname), "rb") - response = HttpResponse( - config_file, content_type="application/zip" - ) - response["Content-Disposition"] = ( - "attachment; filename={}".format(cname) - ) - return response - except Exception as e: - LOG.exception("Config File Not Found") - raise e - elif request.method == "POST": - # Update yaml, zip files, and the database field - try: - new_config_file = request.data["file"] - try: - yaml.safe_load(new_config_file) - except yaml.YAMLError: - return Response( - err("Unable to parse this YAML file."), - status=status.HTTP_400_BAD_REQUEST, - ) - if os.path.exists("{}{}".format(dir_node, name)): - os.remove("{}{}".format(dir_node, name)) - with open("{}{}".format(dir_node, name), "wb+") as f: - for chunk in new_config_file.chunks(): - f.write(chunk) - if os.path.exists("{}{}".format(dir_node, cname)): - os.remove("{}{}".format(dir_node, cname)) - zip_file( - "{}{}".format(dir_node, name), - "{}{}".format(dir_node, cname), - ) - with open("{}{}".format(dir_node, cname), "rb") as f_cfg: - cfg = base64.b64encode(f_cfg.read()) - node.config_file = cfg - node.save() - infos = self._agent_params(pk) - agent = AgentHandler(infos) - agent.update_config(cfg, node.type) - return Response(status=status.HTTP_202_ACCEPTED) - except Exception as e: - LOG.exception("Update Failed") - raise e - - @action(methods=["post"], detail=True, url_path="block", url_name="block") - def block_file(self, request, pk=None): - """ - Peer join channel by uploading a genesis block file - """ - try: - self._validate_organization(request) - organization = request.user.organization - org = organization.name - try: - node = Node.objects.get(id=pk, organization=organization) - except 
ObjectDoesNotExist: - raise ResourceNotFound("Node Not Found") - envs = init_env_vars(node, organization) - block_path = "{}/{}/crypto-config/peerOrganizations/{}/peers/{}/{}.block".format( - CELLO_HOME, org, org, node.name + "." + org, "channel" - ) - uploaded_block_file = request.data["file"] - with open(block_path, "wb+") as f: - for chunk in uploaded_block_file.chunks(): - f.write(chunk) - peer_channel_cli = PeerChannel(**envs) - peer_channel_cli.join(block_path) - os.remove(block_path) - return Response(status=status.HTTP_202_ACCEPTED) - except Exception as e: - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) - - def _register_user(self, request, pk=None): - serializer = NodeUserCreateSerializer(data=request.data) - if serializer.is_valid(raise_exception=True): - name = serializer.validated_data.get("name") - secret = serializer.validated_data.get("secret") - user_type = serializer.validated_data.get("user_type") - attrs = serializer.validated_data.get("attrs", "") - try: - node = Node.objects.get( - id=pk, organization=request.user.organization - ) - # Name is unique for each node - user_count = NodeUser.objects.filter( - node=node, name=name - ).count() - if user_count > 0: - raise ResourceExists("Users Exist") - except ObjectDoesNotExist: - raise ResourceNotFound("Users Not Found") - - node_user = NodeUser( - name=name, - secret=secret, - user_type=user_type, - attrs=attrs, - node=node, - ) - node_user.save() - - agent_config_file = request.build_absolute_uri( - node.agent.config_file.url - ) - node_file_url = request.build_absolute_uri(node.file.url) - user_patch_url = self.reverse_action( - "patch-user", kwargs={"pk": pk, "user_pk": node_user.id} - ) - user_patch_url = request.build_absolute_uri(user_patch_url) - operate_node.delay( - str(node.id), - AgentOperation.FabricCARegister.value, - agent_config_file=agent_config_file, - node_file_url=node_file_url, - user_patch_url=user_patch_url, - fabric_ca_user={ - "name": name, - "secret": 
secret, - "type": user_type, - "attrs": attrs, - }, - ) - response = NodeUserIDSerializer(node_user) - return Response(data=response.data, status=status.HTTP_201_CREATED) - - def _list_user(self, request, pk=None): - serializer = NodeUserQuerySerializer(data=request.GET) - if serializer.is_valid(raise_exception=True): - page = serializer.validated_data.get("page") - per_page = serializer.validated_data.get("per_page") - name = serializer.validated_data.get("name") - user_type = serializer.validated_data.get("user_type") - user_status = serializer.validated_data.get("status") - query_param = {"node__id": pk} - if name is not None: - query_param.update({"name__icontains": name}) - if user_type is not None: - query_param.update({"user_type": user_type}) - if user_status is not None: - query_param.update({"status": user_status}) - - users = NodeUser.objects.filter(**query_param) - p = Paginator(users, per_page) - users = p.page(page) - - response = NodeUserListSerializer( - {"data": users, "total": p.count} - ) - return Response(ok(response.data), status=status.HTTP_200_OK) - - @swagger_auto_schema( - methods=["post"], - operation_description="Register user to node", - operation_summary="Register user to node", - request_body=NodeUserCreateSerializer, - responses=with_common_response( - {status.HTTP_201_CREATED: NodeUserIDSerializer} - ), - ) - @swagger_auto_schema( - methods=["get"], - operation_description="List user of node", - operation_summary="List user of node", - query_serializer=NodeUserQuerySerializer, - responses=with_common_response( - {status.HTTP_200_OK: NodeUserListSerializer} - ), - ) - @action( - methods=["post", "get"], - detail=True, - url_path="users", - url_name="users", - ) - def users(self, request, pk=None): - if request.method == "POST": - return self._register_user(request, pk) - elif request.method == "GET": - return self._list_user(request, pk) - - @swagger_auto_schema( - methods=["patch"], - request_body=NodeUserPatchSerializer, - 
responses=with_common_response({status.HTTP_202_ACCEPTED: "Accepted"}), - ) - @action( - methods=["patch"], - detail=True, - url_path="users/(?P[^/.]+)", - url_name="patch-user", - ) - def patch_user(self, request, pk=None, user_pk=None): - """ - Patch user status for node - - Patch user status for node - """ - try: - serializer = NodeUserPatchSerializer(data=request.data) - if serializer.is_valid(raise_exception=True): - try: - node_user = NodeUser.objects.get(id=user_pk, node__id=pk) - except ObjectDoesNotExist: - raise ResourceNotFound("Node User Not Found") - - node_user.status = serializer.validated_data.get("status") - node_user.save() - - return Response(status=status.HTTP_202_ACCEPTED) - except Exception as e: - LOG.exception("Patch Failed") - return Response(err(e.args), status=status.HTTP_400_BAD_REQUEST) diff --git a/src/api-engine/api/routes/organization/__init__.py b/src/api-engine/api/routes/organization/__init__.py deleted file mode 100644 index 0480730a5..000000000 --- a/src/api-engine/api/routes/organization/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# diff --git a/src/api-engine/api/routes/organization/serializers.py b/src/api-engine/api/routes/organization/serializers.py deleted file mode 100644 index 9e9838915..000000000 --- a/src/api-engine/api/routes/organization/serializers.py +++ /dev/null @@ -1,70 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from rest_framework import serializers - -from api.common.enums import Operation -from api.common.serializers import PageQuerySerializer -from api.models import Organization - - -class OrganizationQuery(PageQuerySerializer, serializers.ModelSerializer): - class Meta: - model = Organization - fields = ("page", "per_page", "name") - extra_kwargs = {"name": {"required": False}} - - -class OrganizationCreateBody(serializers.ModelSerializer): - peernum = serializers.IntegerField( - source="org_peernum", help_text="Total number of peer", required=True - ) - 
orderernum = serializers.IntegerField( - source="org_orderernum", - help_text="Total number of orderer", - required=True, - ) - - class Meta: - model = Organization - fields = ("name", "peernum", "orderernum") - extra_kwargs = {"name": {"required": True}} - - -class OrganizationUpdateBody(serializers.ModelSerializer): - class Meta: - model = Organization - fields = ("name", "agents", "network") - - -class OrganizationResponse(serializers.ModelSerializer): - id = serializers.UUIDField(help_text="ID of Organization") - network = serializers.UUIDField(help_text="ID of Network", allow_null=True) - agents = serializers.UUIDField(help_text="ID of Network", allow_null=True) - - class Meta: - model = Organization - fields = ("id", "name", "created_at", "agents", "network") - extra_kwargs = { - "name": {"required": True}, - "created_at": {"required": True, "read_only": False}, - "id": {"required": True, "read_only": False}, - } - - -class OrganizationList(serializers.Serializer): - total = serializers.IntegerField( - help_text="Total number of Organizations", default=0 - ) - data = OrganizationResponse(many=True, help_text="Organizations list") - - -class OrganizationIDSerializer(serializers.Serializer): - id = serializers.UUIDField(help_text="ID of Organization") - - -class NodeOperationSerializer(serializers.Serializer): - action = serializers.ChoiceField( - help_text=Operation.get_info("Operation for node:", list_str=True), - choices=Operation.to_choices(True), - ) diff --git a/src/api-engine/api/routes/organization/views.py b/src/api-engine/api/routes/organization/views.py deleted file mode 100644 index c7093079b..000000000 --- a/src/api-engine/api/routes/organization/views.py +++ /dev/null @@ -1,483 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import logging -import base64 -import shutil -import os - -from rest_framework import viewsets, status -from rest_framework.decorators import action -from rest_framework.response import Response -from drf_yasg.utils 
import swagger_auto_schema -from django.core.exceptions import ObjectDoesNotExist -from django.core.paginator import Paginator - -from api.utils.common import with_common_response -from api.exceptions import ResourceExists, ResourceNotFound, ResourceInUse -from api.models import ( - Node, - Organization, -) -from api.routes.organization.serializers import ( - OrganizationQuery, - OrganizationCreateBody, - OrganizationList, - OrganizationResponse, - OrganizationIDSerializer, - OrganizationUpdateBody, -) -from api.routes.user.serializers import UserIDSerializer -from api.models import UserProfile -from api.routes.user.serializers import UserListSerializer, UserQuerySerializer -from api.lib.pki import CryptoGen, CryptoConfig -from api.utils import zip_dir, zip_file -from api.config import CELLO_HOME -from api.utils.node_config import NodeConfig - -from api.common import ( - ok, - # err -) - -LOG = logging.getLogger(__name__) - - -class OrganizationViewSet(viewsets.ViewSet): - """Class represents orgnization related operations.""" - - @swagger_auto_schema( - query_serializer=OrganizationQuery, - responses=with_common_response( - with_common_response({status.HTTP_200_OK: OrganizationList}) - ), - ) - def list(self, request): - """ - List Organizations - - :param request: query parameter - :return: organization list - :rtype: list - """ - serializer = OrganizationQuery(data=request.GET) - if serializer.is_valid(raise_exception=True): - page = serializer.validated_data.get("page", 1) - per_page = serializer.validated_data.get("per_page", 10) - name = serializer.validated_data.get("name") - parameters = {} - if name: - parameters.update({"name__icontains": name}) - organizations = Organization.objects.filter(**parameters) - p = Paginator(organizations, per_page) - organizations = p.page(page) - organizations = [ - { - "id": str(organization.id), - "name": organization.name, - "network": ( - str(organization.network.id) - if organization.network - else None - ), - "agents": 
( - organization.agents if organization.agents else None - ), - "created_at": organization.created_at, - } - for organization in organizations - ] - response = OrganizationList( - data={"total": p.count, "data": organizations} - ) - if response.is_valid(raise_exception=True): - return Response( - ok(response.validated_data), status=status.HTTP_200_OK - ) - - @swagger_auto_schema( - request_body=OrganizationCreateBody, - responses=with_common_response( - {status.HTTP_201_CREATED: OrganizationIDSerializer} - ), - ) - def create(self, request): - """ - Create Organization - - :param request: create parameter - :return: organization ID - :rtype: uuid - """ - serializer = OrganizationCreateBody(data=request.data) - if serializer.is_valid(raise_exception=True): - name = serializer.validated_data.get("name") - peernum = serializer.validated_data.get("org_peernum") - orderernum = serializer.validated_data.get("org_orderernum") - try: - Organization.objects.get(name=name) - except ObjectDoesNotExist: - pass - else: - raise ResourceExists("Organization Exists") - - CryptoConfig(name).create(peernum, orderernum) - CryptoGen(name).generate() - - msp, tls = self._conversion_msp_tls(name) - - organization = Organization(name=name, msp=msp, tls=tls) - organization.save() - - # create node config - if peernum > 0: - self._create_node(organization, peernum, "peer") - if orderernum > 0: - self._create_node(organization, orderernum, "orderer") - - response = OrganizationIDSerializer(data=organization.__dict__) - if response.is_valid(raise_exception=True): - return Response( - ok(response.validated_data), status=status.HTTP_201_CREATED - ) - - def _create_node(self, org, num, nodeType): - """ - create node - - :param org: organization - :param num: the number of node - :param nodeType: the type of node - :return: null - """ - for i in range(num): - nodeName = ( - "peer" + str(i) if nodeType == "peer" else "orderer" + str(i) - ) - self._generate_config(nodeType, org.name, nodeName) - 
msp, tls, cfg = self._conversion_msp_tls_cfg( - nodeType, org.name, nodeName - ) - urls = "{}.{}".format(nodeName, org.name) - node = Node( - name=nodeName, - organization=org, - urls=urls, - type=nodeType, - msp=msp, - tls=tls, - agent=None, - config_file=cfg, - ) - node.save() - - def _conversion_msp_tls_cfg(self, type, org, node): - """ - msp and tls , cfg from zip file to byte - - :param org: organization name - :param type: node type - :param node: node name - :return: msp, tls, cfg - :rtype: bytes - """ - try: - if type == "peer": - dir_node = "{}/{}/crypto-config/peerOrganizations/{}/peers/{}/".format( - CELLO_HOME, org, org, node + "." + org - ) - name = "core.yaml" - cname = "peer_config.zip" - else: - dir_node = "{}/{}/crypto-config/ordererOrganizations/{}/orderers/{}/".format( - CELLO_HOME, - org, - org.split(".", 1)[1], - node + "." + org.split(".", 1)[1], - ) - name = "orderer.yaml" - cname = "orderer_config.zip" - - zip_dir("{}msp".format(dir_node), "{}msp.zip".format(dir_node)) - with open("{}msp.zip".format(dir_node), "rb") as f_msp: - msp = base64.b64encode(f_msp.read()) - - zip_dir("{}tls".format(dir_node), "{}tls.zip".format(dir_node)) - with open("{}tls.zip".format(dir_node), "rb") as f_tls: - tls = base64.b64encode(f_tls.read()) - - zip_file( - "{}{}".format(dir_node, name), "{}{}".format(dir_node, cname) - ) - with open("{}{}".format(dir_node, cname), "rb") as f_cfg: - cfg = base64.b64encode(f_cfg.read()) - except Exception as e: - LOG.exception("Conversion Failed: CFG from Zip To Byte") - raise e - - return msp, tls, cfg - - def _generate_config(self, type, org, node): - """ - generate config for node - - :param org: organization name - :param type: node type - :param node: node name - :param port: node port(todo: automatic distribution port) - :return: none - :rtype: none - """ - args = {} - if type == "peer": - args.update({"peer_id": "{}.{}".format(node, org)}) - args.update({"peer_address": "{}.{}:{}".format(node, org, 7051)}) - 
args.update( - { - "peer_gossip_externalEndpoint": "{}.{}:{}".format( - node, org, 7051 - ) - } - ) - args.update( - {"peer_chaincodeAddress": "{}.{}:{}".format(node, org, 7052)} - ) - args.update({"peer_tls_enabled": True}) - args.update({"peer_localMspId": "{}MSP".format(org.capitalize())}) - - a = NodeConfig(org) - a.peer(node, **args) - else: - args.update({"General_ListenPort": 7050}) - args.update( - {"General_LocalMSPID": "{}OrdererMSP".format(org.capitalize())} - ) - args.update({"General_TLS_Enabled": True}) - - a = NodeConfig(org) - a.orderer(node, **args) - - def _conversion_msp_tls(self, name): - """ - msp and tls from zip file to byte - - :param name: organization name - :return: msp, tls - :rtype: bytes - """ - try: - dir_org = "{}/{}/crypto-config/peerOrganizations/{}/".format( - CELLO_HOME, name, name - ) - - zip_dir("{}msp".format(dir_org), "{}msp.zip".format(dir_org)) - with open("{}msp.zip".format(dir_org), "rb") as f_msp: - msp = base64.b64encode(f_msp.read()) - - zip_dir("{}tlsca".format(dir_org), "{}tls.zip".format(dir_org)) - with open("{}tls.zip".format(dir_org), "rb") as f_tls: - tls = base64.b64encode(f_tls.read()) - except Exception as e: - LOG.exception("Conversion Failed: Zip To Byte") - raise e - - return msp, tls - - @swagger_auto_schema( - responses=with_common_response( - {status.HTTP_204_NO_CONTENT: "No Content"} - ) - ) - def destroy(self, request, pk=None): - """ - Delete Organization - - :param request: destory parameter - :param pk: primary key - :return: none - :rtype: rest_framework.status - """ - try: - organization = Organization.objects.get(id=pk) - if organization.network: - raise ResourceInUse("Organization In Use") - - # user_count = UserProfile.objects.filter( - # organization=organization - # ).count() - # if user_count > 0: - # raise ResourceInUse - path = "{}/{}".format(CELLO_HOME, organization.name) - if os.path.exists(path): - shutil.rmtree(path, True) - organization.delete() - except ObjectDoesNotExist: - raise 
ResourceNotFound("Organization Not Found") - - return Response(status=status.HTTP_204_NO_CONTENT) - - @swagger_auto_schema( - responses=with_common_response( - {status.HTTP_200_OK: OrganizationResponse} - ) - ) - def retrieve(self, request, pk=None): - """ - Retrieve Organization - - :param request: retrieve parameter - :param pk: primary key - :return: organization info - :rtype: OrganizationResponse - """ - try: - organization = Organization.objects.get(id=pk) - except ObjectDoesNotExist: - raise ResourceNotFound("Organization Not Found") - else: - response = OrganizationResponse(data=organization.__dict__) - if response.is_valid(raise_exception=True): - return Response( - response.validated_data, status=status.HTTP_200_OK - ) - - @swagger_auto_schema( - request_body=OrganizationUpdateBody, - responses=with_common_response({status.HTTP_202_ACCEPTED: "Accepted"}), - ) - def update(self, request, pk=None): - """ - Update Agent - - Update special agent with id. - """ - serializer = OrganizationUpdateBody(data=request.data) - if serializer.is_valid(raise_exception=True): - name = serializer.validated_data.get("name") - # agents = serializer.validated_data.get("agents") - # network = serializer.validated_data.get("network") - # channel = serializer.validated_data.get("channel") - try: - Organization.objects.get(name=name) - except ObjectDoesNotExist: - pass - # organization = Organization.objects.filter(name=name).update(agents=agents, network=network.id, channel=channel.id) - - return Response(status=status.HTTP_204_NO_CONTENT) - - @staticmethod - def _list_users(request, pk=None): - serializer = UserQuerySerializer(data=request.GET) - if serializer.is_valid(raise_exception=True): - try: - organization = Organization.objects.get(id=pk) - except ObjectDoesNotExist: - raise ResourceNotFound("Organization Not Found") - page = serializer.validated_data.get("page") - per_page = serializer.validated_data.get("per_page") - name = serializer.validated_data.get("name") - 
parameter = {"organization": organization} - if name: - parameter.update({"username__icontains": name}) - users = UserProfile.objects.filter(**parameter) - p = Paginator(users, per_page) - users = p.page(page) - users = [ - { - "id": str(user.id), - "username": user.username, - "role": user.role, - } - for user in users - ] - response = UserListSerializer( - data={"total": p.count, "data": users} - ) - if response.is_valid(raise_exception=True): - return Response( - ok(response.validated_data), status=status.HTTP_200_OK - ) - - @staticmethod - def _add_user(request, pk=None): - serializer = UserIDSerializer(data=request.data) - if serializer.is_valid(raise_exception=True): - user_id = serializer.validated_data.get("id") - try: - organization = Organization.objects.get(id=pk) - user = UserProfile.objects.get(id=user_id) - if user.organization: - raise ResourceInUse("Organization In Use") - except ObjectDoesNotExist: - raise ResourceNotFound("Organization Not Found") - else: - user.organization = organization - user.save() - - return Response(status=status.HTTP_202_ACCEPTED) - - @swagger_auto_schema( - method="get", - query_serializer=UserQuerySerializer, - responses=with_common_response( - {status.HTTP_200_OK: UserListSerializer} - ), - ) - @swagger_auto_schema( - method="post", - request_body=UserIDSerializer, - responses=with_common_response({status.HTTP_202_ACCEPTED: "Accepted"}), - ) - @action(methods=["get", "post"], detail=True, url_path="users") - def manage_users(self, request, pk=None): - """ - get: - List users - - List users in Organization - - post: - Add User - - Add user into Organization - """ - if request.method == "GET": - return self._list_users(request, pk) - elif request.method == "POST": - return self._add_user(request, pk) - - @swagger_auto_schema( - method="delete", - responses=with_common_response( - {status.HTTP_204_NO_CONTENT: "No Content"} - ), - ) - @action( - methods=["delete"], detail=True, url_path="users/(?P[^/.]+)" - ) - def 
remove_user_from_govern(self, request, pk=None, user_id=None): - """ - Remove user from Organization - - Remove user from Organization - """ - try: - user = UserProfile.objects.get(id=user_id, organization__id=pk) - except ObjectDoesNotExist: - raise ResourceNotFound("User Not Found") - else: - user.organization = None - user.save() - - return Response(status=status.HTTP_204_NO_CONTENT) - - @swagger_auto_schema(method="post", responses=with_common_response()) - @action(methods=["post"], detail=True, url_path="certificates") - def request_certificate(self, request, pk=None): - """ - post: - Request Certificate - - Request certificate - """ - pass diff --git a/src/api-engine/api/routes/user/__init__.py b/src/api-engine/api/routes/user/__init__.py deleted file mode 100644 index 0480730a5..000000000 --- a/src/api-engine/api/routes/user/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# diff --git a/src/api-engine/api/routes/user/serializers.py b/src/api-engine/api/routes/user/serializers.py deleted file mode 100644 index 0d2646c8b..000000000 --- a/src/api-engine/api/routes/user/serializers.py +++ /dev/null @@ -1,101 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from rest_framework import serializers -from api.common.enums import Operation, NetworkType, FabricNodeType, UserRole -from api.common.serializers import PageQuerySerializer -from api.models import UserProfile -from api.utils.jwt import OrgSerializer - - -class NodeQuery(PageQuerySerializer): - pass - - -class NodeCreateBody(serializers.Serializer): - network_type = serializers.ChoiceField( - help_text=NetworkType.get_info("Network types:", list_str=True), - choices=NetworkType.to_choices(), - ) - type = serializers.ChoiceField( - help_text=FabricNodeType.get_info("Node Types:", list_str=True), - choices=FabricNodeType.to_choices(True), - ) - - -class NodeIDSerializer(serializers.Serializer): - id = serializers.CharField(help_text="ID of node") - - -class 
NodeOperationSerializer(serializers.Serializer): - action = serializers.ChoiceField( - help_text=Operation.get_info("Operation for node:", list_str=True), - choices=Operation.to_choices(True), - ) - - -class UserCreateBody(serializers.ModelSerializer): - role = serializers.ChoiceField( - help_text=UserRole.get_info("User roles:", list_str=True), - choices=UserRole.to_choices(string_as_value=True), - ) - - class Meta: - model = UserProfile - fields = ("username", "role", "organization", "password", "email") - extra_kwargs = { - "username": {"required": True}, - "role": {"required": True}, - "password": {"required": True}, - "email": {"required": True}, - } - - -class UserIDSerializer(serializers.Serializer): - id = serializers.UUIDField(help_text="ID of user") - - -class UserQuerySerializer(PageQuerySerializer, serializers.Serializer): - username = serializers.CharField( - help_text="Username to filter", required=False, max_length=64 - ) - - -class UserInfoSerializer(serializers.ModelSerializer): - id = serializers.UUIDField(help_text="ID of user") - organization = OrgSerializer(allow_null=True, required=False) - - class Meta: - model = UserProfile - fields = ("id", "username", "role", "organization", "created_at") - extra_kwargs = { - "id": {"read_only": False}, - "username": {"validators": []}, - } - - -class UserListSerializer(serializers.Serializer): - total = serializers.IntegerField(help_text="Total number of users") - data = UserInfoSerializer(many=True, help_text="Users list") - - -class UserAuthSerializer(serializers.Serializer): - username = serializers.CharField( - help_text="Username for login", max_length=64 - ) - password = serializers.CharField( - help_text="Password for login", max_length=64 - ) - - -class UserAuthResponseSerializer(serializers.Serializer): - access_token = serializers.CharField(help_text="Access token") - expires_in = serializers.IntegerField(help_text="Expires time") - scope = serializers.CharField(help_text="Scopes for token") - 
token_type = serializers.CharField(help_text="Type of token") - - -class UserUpdateSerializer(serializers.Serializer): - password = serializers.CharField( - help_text="New password for login", max_length=64 - ) diff --git a/src/api-engine/api/routes/user/views.py b/src/api-engine/api/routes/user/views.py deleted file mode 100644 index b761f8925..000000000 --- a/src/api-engine/api/routes/user/views.py +++ /dev/null @@ -1,181 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import logging -import os - -from django.core.paginator import Paginator -from django.db.models import Q -from drf_yasg.utils import swagger_auto_schema -from rest_framework import viewsets, status -from rest_framework.decorators import action -from rest_framework.response import Response -from rest_framework.permissions import IsAuthenticated - -from api.exceptions import ResourceExists, CustomError -from api.models import UserProfile -from api.routes.user.serializers import ( - UserCreateBody, - UserIDSerializer, - UserQuerySerializer, - UserListSerializer, - UserUpdateSerializer, -) -from api.utils.common import with_common_response - -LOG = logging.getLogger(__name__) - -ADMIN_USERNAME = os.getenv("ADMIN_USERNAME") - - -class UserViewSet(viewsets.ViewSet): - @swagger_auto_schema( - query_serializer=UserQuerySerializer, - responses=with_common_response( - {status.HTTP_200_OK: UserListSerializer} - ), - ) - def list(self, request, *args, **kwargs): - """ - List Users - - List user through query parameter - """ - serializer = UserQuerySerializer(data=request.GET) - if serializer.is_valid(raise_exception=True): - username = serializer.validated_data.get("username") - page = serializer.validated_data.get("page") - per_page = serializer.validated_data.get("per_page") - query_params = {} - if username: - query_params.update({"username__icontains": username}) - - users = UserProfile.objects.filter(**query_params).exclude( - username=ADMIN_USERNAME - ) - p = Paginator(users, per_page) - users = 
p.page(page) - # users = [user for user in users] - - response = UserListSerializer( - {"total": p.count, "data": list(users.object_list)} - ).data - return Response(data=response, status=status.HTTP_200_OK) - - @swagger_auto_schema( - request_body=UserCreateBody, - responses=with_common_response( - {status.HTTP_201_CREATED: UserIDSerializer} - ), - ) - def create(self, request): - """ - Create User - - Create new user - """ - serializer = UserCreateBody(data=request.data) - if serializer.is_valid(raise_exception=True): - username = serializer.validated_data.get("username") - role = serializer.validated_data.get("role") - organization = serializer.validated_data.get("organization") - password = serializer.validated_data.get("password") - email = serializer.validated_data.get("email") - - user_count = UserProfile.objects.filter( - Q(username=username) | Q(email=email) - ).count() - if user_count > 0: - raise ResourceExists( - detail="User name or email already exists" - ) - - user = UserProfile( - username=username, - role=role, - email=email, - organization=organization, - ) - user.set_password(password) - user.save() - response = UserIDSerializer(data={"id": user.id}) - if response.is_valid(raise_exception=True): - return Response( - response.validated_data, status=status.HTTP_201_CREATED - ) - - @swagger_auto_schema( - responses=with_common_response( - {status.HTTP_204_NO_CONTENT: "No Content"} - ) - ) - def destroy(self, request, pk=None): - """ - Delete User - - Delete user - """ - try: - UserProfile.objects.get(id=pk).delete() - except Exception as e: - raise CustomError(detail=str(e)) - else: - return Response(status=status.HTTP_204_NO_CONTENT) - - @action( - methods=["get", "post", "put", "delete"], - detail=True, - url_path="attributes", - ) - def attributes(self, request, pk=None): - """ - get: - Get User Attributes - - Get attributes of user - post: - Create Attributes - - Create attribute for user - put: - Update Attribute - - Update attribute of user - 
delete: - Delete Attribute - - Delete attribute of user - """ - pass - - @swagger_auto_schema( - method="post", - request_body=UserUpdateSerializer, - responses=with_common_response({status.HTTP_200_OK: "OK"}), - ) - @action( - methods=["post"], - detail=True, - url_path="password", - permission_classes=[ - IsAuthenticated, - ], - ) - def password(self, request, pk=None): - """ - post: - Update/Reset Password - - Update/Reset password for user - """ - serializer = UserUpdateSerializer(data=request.data) - if serializer.is_valid(raise_exception=True): - password = serializer.validated_data.get("password") - user = request.user - user.set_password(password) - user.save() - response = UserIDSerializer(data={"id": user.id}) - if response.is_valid(raise_exception=True): - return Response( - response.validated_data, status=status.HTTP_200_OK - ) diff --git a/src/api-engine/api/tasks/__init__.py b/src/api-engine/api/tasks/__init__.py deleted file mode 100644 index 83335d139..000000000 --- a/src/api-engine/api/tasks/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .agent import operate_node diff --git a/src/api-engine/api/tasks/agent.py b/src/api-engine/api/tasks/agent.py deleted file mode 100644 index 6c07cd7d0..000000000 --- a/src/api-engine/api/tasks/agent.py +++ /dev/null @@ -1,123 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from __future__ import absolute_import, unicode_literals - -import json -import logging -import os - -import docker -from django.core.exceptions import ObjectDoesNotExist - -from api.models import Node, Port -from api_engine.celery import app - -LOG = logging.getLogger(__name__) -ADMIN_TOKEN = os.getenv("ADMIN_TOKEN") - - -class NodeHandler(object): - def __init__(self, node_id=None, action=None, **kwargs): - self._node_id = node_id - self._action = action - - try: - node = Node.objects.get(id=node_id) - ports = Port.objects.filter(node=node) - ports = {str(item.internal): item.external for item in ports} - except ObjectDoesNotExist: - raise 
ObjectDoesNotExist("Node Does Not Exist") - - self._node = node - self._network_type = node.network_type - self._network_version = node.network_version - self._node_type = node.type - self._agent_image = node.agent.image - self._agent_id = str(node.agent.id) - self._agent_ip = str(node.agent.ip) - self._service_ports = ports - - self._agent_config_file = kwargs.get("agent_config_file") - self._node_detail_url = kwargs.get("node_detail_url") - self._node_file_upload_api = kwargs.get("node_file_upload_api") - self._node_file_url = kwargs.get("node_file_url") - self._fabric_ca_user = kwargs.get("fabric_ca_user", {}) - self._user_patch_url = kwargs.get("user_patch_url") - self._peer_ca_list = json.loads(kwargs.get("peer_ca_list", "[]")) - - self._agent_environment = { - "DEPLOY_NAME": node.name, - "NETWORK_TYPE": node.network_type, - "NETWORK_VERSION": node.network_version, - "NODE_TYPE": node.type, - "NODE_ID": str(node.id), - "AGENT_ID": str(node.agent.id), - "AGENT_IP": str(node.agent.ip), - "AGENT_CONFIG_FILE": self._agent_config_file, - "NODE_DETAIL_URL": self._node_detail_url, - "NODE_UPLOAD_FILE_URL": self._node_file_upload_api, - # Related files to node - "NODE_FILE_URL": self._node_file_url, - # Token for call update node api - "TOKEN": ADMIN_TOKEN, - "OPERATION": self._action, - "FABRIC_CA_USER": json.dumps(self._fabric_ca_user), - "SERVICE_PORTS": json.dumps(self._service_ports), - "USER_PATCH_URL": self._user_patch_url, - } - - if node.ca: - self._agent_environment.update( - { - "FABRIC_CA_CONFIG": json.dumps( - { - "admin_name": node.ca.admin_name, - "admin_password": node.ca.admin_password, - "hosts": ",".join(node.ca.hosts), - } - ) - } - ) - - if node.peer: - peer = node.peer - peer_config = { - "gossip_use_leader_reflection": peer.gossip_use_leader_reflection, - "gossip_org_leader": peer.gossip_org_leader, - "gossip_skip_handshake": peer.gossip_skip_handshake, - "name": peer.name, - "local_msp_id": peer.local_msp_id, - "ca_list": self._peer_ca_list, - 
} - self._agent_environment.update( - {"FABRIC_PEER_CONFIG": json.dumps(peer_config)} - ) - - def _launch_agent(self): - client = docker.from_env() - client.containers.run( - self._agent_image, - auto_remove=True, - environment=self._agent_environment, - detach=True, - ) - - def run(self): - self._launch_agent() - - -@app.task(bind=True, default_retry_delay=5, max_retires=3, time_limit=360) -def operate_node(self, node_id=None, action=None, **kwargs): - if node_id is None or action is None: - return False - - try: - node_handler = NodeHandler(node_id=node_id, action=action, **kwargs) - except ObjectDoesNotExist: - return False - - try: - node_handler.run() - except Exception as e: - self.retry(exc=e) diff --git a/src/api-engine/api/utils/__init__.py b/src/api-engine/api/utils/__init__.py index 1f4b2081e..5969b0c4c 100644 --- a/src/api-engine/api/utils/__init__.py +++ b/src/api-engine/api/utils/__init__.py @@ -8,8 +8,7 @@ from api.common.enums import ErrorCode from rest_framework import status from rest_framework.exceptions import ErrorDetail -from .common import zip_dir, zip_file -from api.common import ok, err +from api.common import err LOG = logging.getLogger(__name__) diff --git a/src/api-engine/api/utils/common.py b/src/api-engine/api/utils/common.py index bc8c4f25a..ed79dffcd 100644 --- a/src/api-engine/api/utils/common.py +++ b/src/api-engine/api/utils/common.py @@ -13,21 +13,12 @@ import uuid from zipfile import ZipFile from json import loads -from api.config import CELLO_HOME import json import logging LOG = logging.getLogger(__name__) -def make_uuid(): - return str(uuid.uuid4()) - - -def random_name(prefix=""): - return "%s-%s" % (prefix, uuid.uuid4().hex) - - def with_common_response(responses=None): if responses is None: responses = {} @@ -53,282 +44,3 @@ def with_common_response(responses=None): (serializers.FileField, openapi.TYPE_FILE), (serializers.ImageField, openapi.TYPE_FILE), ] - - -def to_form_paras(self): - custom_paras = [] - for field_name, 
field in self.fields.items(): - type_str = openapi.TYPE_STRING - for field_class, type_format in basic_type_info: - if isinstance(field, field_class): - type_str = type_format - help_text = getattr(field, "help_text") - default = getattr(field, "default", None) - required = getattr(field, "required") - if callable(default): - custom_paras.append( - openapi.Parameter( - field_name, - openapi.IN_FORM, - help_text, - type=type_str, - required=required, - ) - ) - else: - custom_paras.append( - openapi.Parameter( - field_name, - openapi.IN_FORM, - help_text, - type=type_str, - required=required, - default=default, - ) - ) - return custom_paras - - -def any_of(*perm_classes): - """Returns permission class that allows access for - one of permission classes provided in perm_classes""" - - class Or(BasePermission): - def has_permission(*args): - allowed = [p.has_permission(*args) for p in perm_classes] - return reduce(lambda x, y: x or y, allowed) - - return Or - - -def hash_file(file, block_size=65536): - hash_func = hashlib.md5() - for buf in iter(partial(file.read, block_size), b""): - hash_func.update(buf) - - return hash_func.hexdigest() - - -def zip_dir(dirpath, outFullName): - """ - Compress the specified folder - :param dirpath: specified folder - :param outFullName: Save path+xxxx.zip - :return: null - """ - dir_dst = "/" + dirpath.rsplit("/", 1)[1] - zdir = ZipFile(outFullName, "w") - for path, dirnames, filenames in os.walk(dirpath): - fpath = dir_dst + path.replace(dirpath, "") - for filename in filenames: - zdir.write( - os.path.join(path, filename), os.path.join(fpath, filename) - ) - # zip empty folder - for dirname in dirnames: - zdir.write( - os.path.join(path, dirname), os.path.join(fpath, dirname) - ) - zdir.close() - - -def zip_file(dirpath, outFullName): - """ - Compress the specified file - :param dirpath: specified folder of file - :param outFullName: Save path+filename.zip - :return: null - """ - zfile = ZipFile(outFullName, "w") - 
zfile.write(dirpath, dirpath.rsplit("/", 1)[1]) - zfile.close() - - -def parse_block_file(data): - """ - Parse org config from channel config block. - - :param data: channel config block in json format. - :param org_name: the organization prefix name - :return organization config - """ - config = loads(data) - if config.get("data"): - return ( - config.get("data") - .get("data")[0] - .get("payload") - .get("data") - .get("config") - ) - return {"error": "can't find channel config"} - - -def to_dict(data): - return loads(data) - - -def json_filter(input, output, expression): - """ - Process JSON data using path expression similar to jq - - Args: - input (str): JSON data or file path to JSON - output (str): Path expression like ".data.data[0].payload.data.config" - - Returns: - dict: Processed JSON data - """ - # if json_data is a file path, read the file - if isinstance(input, str): - with open(input, "r", encoding="utf-8") as f: - data = json.load(f) - else: - data = input - - # parse the path expression - path_parts = expression.strip(".").split(".") - result = data - - for part in path_parts: - # handle array index, like data[0] - if "[" in part and "]" in part: - array_name = part.split("[")[0] - index = int(part.split("[")[1].split("]")[0]) - result = result[array_name][index] - else: - result = result[part] - - with open(output, "w", encoding="utf-8") as f: - json.dump(result, f, sort_keys=False, indent=4) - - LOG.info("jq {} {} -> {}".format(expression, input, output)) - - -def json_add_anchor_peer(input, output, anchor_peer_config, org_msp): - """ - Add anchor peer to the organization - - Args: - input (str): JSON data or file path to JSON - output (str): Path expression like ".data.data[0].payload.data.config" - expression (str): Anchor peer data - """ - # if json_data is a file path, read the file - if isinstance(input, str): - with open(input, "r", encoding="utf-8") as f: - data = json.load(f) - else: - data = input - - if "groups" not in 
data["channel_group"]: - data["channel_group"]["groups"] = {} - if "Application" not in data["channel_group"]["groups"]: - data["channel_group"]["groups"]["Application"] = {"groups": {}} - if org_msp not in data["channel_group"]["groups"]["Application"]["groups"]: - data["channel_group"]["groups"]["Application"]["groups"][org_msp] = { - "values": {} - } - - data["channel_group"]["groups"]["Application"]["groups"][org_msp][ - "values" - ].update(anchor_peer_config) - - with open(output, "w", encoding="utf-8") as f: - json.dump(data, f, sort_keys=False, indent=4) - - LOG.info( - "jq '.channel_group.groups.Application.groups.Org1MSP.values += ... ' {} -> {}".format( - input, output - ) - ) - - -def json_create_envelope(input, output, channel): - """ - Create a config update envelope structure - - Args: - input (str): Path to the config update JSON file - output (str): Path to save the envelope JSON - channel (str): Name of the channel - """ - try: - # Read the config update file - with open(input, "r", encoding="utf-8") as f: - config_update = json.load(f) - - # Create the envelope structure - envelope = { - "payload": { - "header": { - "channel_header": {"channel_id": channel, "type": 2} - }, - "data": {"config_update": config_update}, - } - } - - # Write the envelope to output file - with open(output, "w", encoding="utf-8") as f: - json.dump(envelope, f, sort_keys=False, indent=4) - - LOG.info("echo 'payload ... ' | jq . > {}".format(output)) - - except Exception as e: - LOG.error("Failed to create config update envelope: {}".format(str(e))) - raise - - -def init_env_vars(node, org): - """ - Initialize environment variables for peer channel CLI. - :param node: Node object - :param org: Organization object. 
- :return env: dict - """ - org_name = org.name - org_domain = org_name.split(".", 1)[1] - dir_certificate = "{}/{}/crypto-config/ordererOrganizations/{}".format( - CELLO_HOME, org_name, org_domain - ) - dir_node = "{}/{}/crypto-config/peerOrganizations".format( - CELLO_HOME, org_name - ) - - envs = {} - - if node.type == "orderer": - envs = { - "CORE_PEER_TLS_ENABLED": "true", - "ORDERER_CA": "{}/orderers/{}/msp/tlscacerts/tlsca.{}-cert.pem".format( - dir_certificate, node.name + "." + org_domain, org_domain - ), - "ORDERER_ADMIN_TLS_SIGN_CERT": "{}/orderers/{}/tls/server.crt".format( - dir_certificate, node.name + "." + org_domain - ), - "ORDERER_ADMIN_TLS_PRIVATE_KEY": "{}/orderers/{}/tls/server.key".format( - dir_certificate, node.name + "." + org_domain - ), - } - - elif node.type == "peer": - envs = { - "CORE_PEER_TLS_ENABLED": "true", - "CORE_PEER_LOCALMSPID": "{}MSP".format( - org_name.split(".")[0].capitalize() - ), - "CORE_PEER_TLS_ROOTCERT_FILE": "{}/{}/peers/{}/tls/ca.crt".format( - dir_node, org_name, node.name + "." + org_name - ), - "CORE_PEER_MSPCONFIGPATH": "{}/{}/users/Admin@{}/msp".format( - dir_node, org_name, org_name - ), - "CORE_PEER_ADDRESS": "{}:{}".format( - node.name + "." + org_name, str(7051) - ), - "FABRIC_CFG_PATH": "{}/{}/peers/{}/".format( - dir_node, org_name, node.name + "." 
+ org_name - ), - } - - return envs diff --git a/src/api-engine/api/utils/jwt.py b/src/api-engine/api/utils/jwt.py deleted file mode 100644 index b945e4104..000000000 --- a/src/api-engine/api/utils/jwt.py +++ /dev/null @@ -1,39 +0,0 @@ -import logging - -from rest_framework import serializers - -from api.models import UserProfile, Organization -from api.common import ok - -LOG = logging.getLogger(__name__) - - -class OrgSerializer(serializers.ModelSerializer): - class Meta: - model = Organization - fields = ("id", "name") - - -class UserSerializer(serializers.ModelSerializer): - organization = OrgSerializer(allow_null=True) - - class Meta: - model = UserProfile - fields = ("id", "username", "role", "email", "organization") - - -def jwt_response_payload_handler(token, user=None, request=None): - """ - Customize response for json web token - - :param token: the token value - :param user: user object for UserProfile - :param request: request context - :return: UserSerializer data - """ - return ok( - { - "token": token, - "user": UserSerializer(user, context={"request": request}).data, - } - ) diff --git a/src/api-engine/api/utils/node_config.py b/src/api-engine/api/utils/node_config.py deleted file mode 100644 index 100295e4c..000000000 --- a/src/api-engine/api/utils/node_config.py +++ /dev/null @@ -1,146 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from string import Template -import os -import yaml -from api.config import CELLO_HOME - - -class NodeConfig: - """Class represents crypto-config yaml.""" - - def __init__( - self, - org, - peer_file="core.yaml", - orderer_file="orderer.yaml", - ca_file="", - ): - """ - init node config - - :param org: organization name - :param peer: peer profile template - :param ca: ca profile template - :param orderer: orderer profile template - :return: none - :rtype: xxx - """ - self.org = org - self.peer_file = peer_file - self.orderer_file = orderer_file - self.ca_file = ca_file - - @staticmethod - def _render(src, dst, 
**kw): - """ - Generate configuration file based on parameters - - :param kw: Node configuration parameters,Use the underline interval key。 - e.g., - peer listenAddress, kwargs["peer_listenAddress"]="0.0.0.0:7051" - chaincode builder, kwargs["chaincode_builder"]="hyperledger/fabric-ccenv:1.4.2" - :param src: Node profile template - :param dst: Node profile - :return: none - :rtype: none - """ - try: - with open(src, "r+") as f: - cfg = yaml.load(f, Loader=yaml.FullLoader) - - for key, value in kw.items(): - keys = key.split("_") - # switch = {2: cfg[keys[0]][keys[1]], - # 3: cfg[keys[0]][keys[1]][keys[2]], - # 4: cfg[keys[0]][keys[1]][keys[2]][keys[3]], - # 5: cfg[keys[0]][keys[1]][keys[2]][keys[3]][keys[4]]} - - if len(keys) == 2: - cfg[keys[0]][keys[1]] = value - elif len(keys) == 3: - cfg[keys[0]][keys[1]][keys[2]] = value - elif len(keys) == 4: - cfg[keys[0]][keys[1]][keys[2]][keys[3]] = value - elif len(keys) == 5: - cfg[keys[0]][keys[1]][keys[2]][keys[3]][keys[4]] = value - - with open(dst, "w+") as f: - yaml.dump(cfg, f) - except Exception as e: - raise Exception("Configuration File Not Generated") from e - - def __from_dst(self, node, node_type): - """ - Location of the new profile - - :param node: node name - :param node_type: node type (peer, orderer, ca) - :return: dst - :rtype: string - """ - if node_type == "peer": - dst = "{}/{}/crypto-config/peerOrganizations/{}/peers/{}.{}/{}".format( - CELLO_HOME, self.org, self.org, node, self.org, self.peer_file - ) - elif node_type == "orderer": - dst = "{}/{}/crypto-config/ordererOrganizations/{}/orderers/{}.{}/{}".format( - CELLO_HOME, - self.org, - self.org.split(".", 1)[1], - node, - self.org.split(".", 1)[1], - self.orderer_file, - ) - else: - dst = "" - return dst - - def peer(self, node, **kwargs): - """ - Location of the node profile - - :param node: peer name - :param kwargs: Node configuration parameters,Use the underline interval key。 - e.g., - peer listenAddress, 
kwargs["peer_listenAddress"]="0.0.0.0:7051" - chaincode builder, kwargs["chaincode_builder"]="hyperledger/fabric-ccenv:1.4.2" - :return: none - :rtype: none - """ - src = "/opt/node/core.yaml.bak" - dst = self.__from_dst(node, "peer") - self._render(src, dst, **kwargs) - - def orderer(self, node, **kwargs): - """ - Location of the orderer profile - - :param node: orderer name - :param kwargs: Node configuration parameters,Use the underline interval key。 - e.g., - peer listenAddress, kwargs["peer_listenAddress"]="0.0.0.0:7051" - chaincode builder, kwargs["chaincode_builder"]="hyperledger/fabric-ccenv:1.4.2" - :return: none - :rtype: none - """ - src = "/opt/node/orderer.yaml.bak" - dst = self.__from_dst(node, "orderer") - self._render(src, dst, **kwargs) - - def ca(self, node, **kwargs): - """ - Location of the orderer profile - - :param node: ca name - :param kwargs: Node configuration parameters,Use the underline interval key。 - e.g., - peer listenAddress, kwargs["peer_listenAddress"]="0.0.0.0:7051" - chaincode builder, kwargs["chaincode_builder"]="hyperledger/fabric-ccenv:1.4.2" - :return: none - :rtype: none - """ - src = self.ca_file - dst = self.__from_dst(node, "ca") - self._render(src, dst, **kwargs) diff --git a/src/api-engine/api/utils/port_picker.py b/src/api-engine/api/utils/port_picker.py deleted file mode 100644 index 4d422c335..000000000 --- a/src/api-engine/api/utils/port_picker.py +++ /dev/null @@ -1,130 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -import logging -import socket -import os -from random import sample -from django.core.exceptions import ObjectDoesNotExist -from api.models import Port, Node, Agent - -CLUSTER_PORT_START = int(os.getenv("CLUSTER_PORT_START", 7050)) -MAX_RETRY = 100 - -LOG = logging.getLogger(__name__) - - -def port_is_free(ip=None, port=0): - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.settimeout(1) - try: - s.connect((ip, int(port))) - s.shutdown(socket.SHUT_RDWR) - return False - except Exception: 
- return True - finally: - s.close() - - -def port_picker(agent_id=None, request_count=1, exclude_ports=None): - if exclude_ports is None: - exclude_ports = [] - - used_ports = Port.objects.values_list("external").filter( - node__agent__id=agent_id - ) - exclude_ports += [port[0] for port in used_ports] - - return sample( - [ - i - for i in range(CLUSTER_PORT_START, 65535) - if i not in exclude_ports - ], - request_count, - ) - - -def find_available_ports( - ip=None, - node_id=None, - agent_id=None, - request_count=1, - exclude_ports=None, - retry=MAX_RETRY, -): - if node_id is None or agent_id is None or retry == 0: - return [] - all_port_is_free = True - - if exclude_ports is None: - exclude_ports = [] - ports = port_picker(agent_id, request_count, exclude_ports) - - for port in ports: - if not port_is_free(ip, port): - exclude_ports.append(port) - all_port_is_free = False - - if not all_port_is_free: - retry -= 1 - return find_available_ports( - ip, node_id, agent_id, request_count, exclude_ports, retry - ) - # Removed these lines of code bc they can produce port objects with 0 internal port number. 
- # try: - # node = Node.objects.get(id=node_id) - # except ObjectDoesNotExist: - # return [] - # else: - # port_objects = [Port(external=port, node=node) for port in ports] - # Port.objects.bulk_create(port_objects) - - return ports - - -def set_ports_mapping(node_id=None, mapping=None, new=False): - if mapping is None: - mapping = [] - - if new: - try: - node = Node.objects.get(id=node_id) - except ObjectDoesNotExist: - LOG.error("Node not found") - else: - port_objects = [ - Port( - external=port.get("external"), - internal=port.get("internal"), - node=node, - ) - for port in mapping - ] - Port.objects.bulk_create(port_objects) - else: - for port in mapping: - Port.objects.filter( - node__id=node_id, external=port.get("external") - ).update(internal=port.get("internal")) - - -def get_available_ports( - agent_id=None, - request_count=1, -): - - agent = Agent.objects.get(id=agent_id).free_ports - - used_ports = agent.free_ports - - ports = sample( - [i for i in range(CLUSTER_PORT_START, 65535) if i not in used_ports], - request_count, - ) - - agent.free_ports = used_ports.append(ports) - agent.save() - - return ports diff --git a/src/api-engine/api/views.py b/src/api-engine/api/views.py deleted file mode 100644 index 23752175f..000000000 --- a/src/api-engine/api/views.py +++ /dev/null @@ -1,6 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from django.shortcuts import render - -# Create your views here. diff --git a/src/api-engine/api_engine/__init__.py b/src/api-engine/api_engine/__init__.py index acb5342eb..e69de29bb 100644 --- a/src/api-engine/api_engine/__init__.py +++ b/src/api-engine/api_engine/__init__.py @@ -1,10 +0,0 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# -from __future__ import absolute_import, unicode_literals - -# This will make sure the app is always imported when -# Django starts so that shared_task will use this app. 
-from .celery import app as celery_app - -__all__ = ("celery_app",) diff --git a/src/api-engine/api_engine/celery.py b/src/api-engine/api_engine/celery.py deleted file mode 100644 index cf5bbdb3f..000000000 --- a/src/api-engine/api_engine/celery.py +++ /dev/null @@ -1,17 +0,0 @@ -from __future__ import absolute_import, unicode_literals -import os -from celery import Celery - -# set the default Django settings module for the 'celery' program. -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api_engine.settings") - -app = Celery("api_engine") - -# Using a string here means the worker doesn't have to serialize -# the configuration object to child processes. -# - namespace='CELERY' means all celery-related configuration keys -# should have a `CELERY_` prefix. -app.config_from_object("django.conf:settings", namespace="CELERY") - -# Load task modules from all registered Django app configs. -app.autodiscover_tasks() diff --git a/src/api-engine/api_engine/settings.py.example b/src/api-engine/api_engine/settings.py similarity index 79% rename from src/api-engine/api_engine/settings.py.example rename to src/api-engine/api_engine/settings.py index c25f700ac..c64faf9dc 100644 --- a/src/api-engine/api_engine/settings.py.example +++ b/src/api-engine/api_engine/settings.py @@ -21,10 +21,10 @@ # See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = "=5-oa588z5-5ow4wd8+=xoj%uy_rd6a65edkfvn3&zw+1=qhwd" +SECRET_KEY = os.getenv('SECRET_KEY', 'change_me') # SECURITY WARNING: don't run with debug turned on in production! 
-DEBUG = ${DEBUG} +DEBUG = os.getenv('DEBUG', 'True').upper() == 'TRUE' ALLOWED_HOSTS = ["*"] @@ -50,6 +50,11 @@ "rest_auth.registration", "corsheaders", "rest_framework_simplejwt", + "user.apps.UserConfig", + "organization.apps.OrganizationConfig", + "node.apps.NodeConfig", + "channel.apps.ChannelConfig", + "chaincode.apps.ChaincodeConfig" ] MIDDLEWARE = [ @@ -91,11 +96,11 @@ DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql", - "NAME": "$DB_NAME", - "USER": "$DB_USER", - "PASSWORD": "$DB_PASSWORD", - "HOST": "$DB_HOST", - "PORT": "$DB_PORT", + "NAME": os.getenv("DB_NAME", "postgres"), + "USER": os.getenv("DB_USER", "postgres"), + "PASSWORD": os.getenv("DB_PASSWORD", "postgres"), + "HOST": os.getenv("DB_HOST", "localhost"), + "PORT": os.getenv("DB_PORT", "5432"), } } @@ -131,10 +136,9 @@ # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.1/howto/static-files/ - -WEBROOT = "$WEBROOT" -STATIC_URL = "$WEBROOT/static/" -STATIC_ROOT = "/var/www/server/static" +WEBROOT = os.path.join(os.getenv("WEB_PREFIX", ""), "api", os.getenv("API_VERSION", "v1")) + "/" +STATIC_URL = os.path.join(WEBROOT, "static/") +STATIC_ROOT = os.path.join(BASE_DIR, STATIC_URL) REST_FRAMEWORK = { "DEFAULT_VERSIONING_CLASS": "rest_framework.versioning.AcceptHeaderVersioning", @@ -164,7 +168,7 @@ REST_USE_JWT = True -AUTH_USER_MODEL = 'api.UserProfile' +AUTH_USER_MODEL = 'user.UserProfile' SWAGGER_SETTINGS = { # For validating your swagger schema(setting None to not validate) @@ -205,15 +209,19 @@ "propagate": False, }, "api": {"handlers": ["console"], "level": "DEBUG", "propagate": False}, + "auth": {"handlers": ["console"], "level": "DEBUG", "propagate": False}, + "user": {"handlers": ["console"], "level": "DEBUG", "propagate": False}, + "organization": {"handlers": ["console"], "level": "DEBUG", "propagate": False}, + "node": {"handlers": ["console"], "level": "DEBUG", "propagate": False}, + "channel": {"handlers": ["console"], "level": "DEBUG", 
"propagate": False}, + "chaincode": {"handlers": ["console"], "level": "DEBUG", "propagate": False}, }, } MAX_AGENT_CAPACITY = 100 -MEDIA_ROOT = "/var/www/media" -MEDIA_URL = "$WEBROOT/media/" - -CELERY_BROKER_URL = "$CELERY_BROKER_URL" +MEDIA_URL = os.path.join(WEBROOT, "media/") +MEDIA_ROOT = os.path.join(BASE_DIR, MEDIA_URL) SIMPLE_JWT = { "ACCESS_TOKEN_LIFETIME": timedelta(hours=1), @@ -242,3 +250,12 @@ } DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' + +CELLO_HOME = os.path.join(BASE_DIR, "cello") +FABRIC_TOOL = os.path.join(CELLO_HOME, "bin") +FABRIC_CFG = os.path.join(CELLO_HOME, "node") + +FABRIC_PEER_CFG = os.path.join(FABRIC_CFG, "core.yaml.bak") +FABRIC_ORDERER_CFG = os.path.join(FABRIC_CFG, "orderer.yaml.bak") + +FABRIC_VERSION = "2.5.13" diff --git a/src/api-engine/api_engine/urls.py b/src/api-engine/api_engine/urls.py index f05bc4991..48b64f882 100644 --- a/src/api-engine/api_engine/urls.py +++ b/src/api-engine/api_engine/urls.py @@ -16,8 +16,6 @@ 1. Import the include() function: from django.urls import include, path 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ -import os - from django.conf import settings from django.urls import path, include from rest_framework import permissions @@ -28,27 +26,13 @@ TokenRefreshView, ) from django.conf.urls.static import static - -from api.routes.network.views import NetworkViewSet -from api.routes.agent.views import AgentViewSet -from api.routes.node.views import NodeViewSet -from api.routes.organization.views import OrganizationViewSet -from api.routes.user.views import UserViewSet -from api.routes.file.views import FileViewSet -from api.routes.general.views import RegisterViewSet -from api.routes.channel.views import ChannelViewSet -from api.routes.chaincode.views import ChainCodeViewSet -from api.routes.general.views import ( - CelloTokenObtainPairView, - CelloTokenVerifyView, -) - - -DEBUG = getattr(settings, "DEBUG") -API_VERSION = os.getenv("API_VERSION") -WEBROOT = os.getenv("WEBROOT") -# WEBROOT = "/".join(WEBROOT.split("/")[1:]) + "/" -WEBROOT = "api/v1/" +from api_engine.settings import DEBUG, WEBROOT +from auth.views import RegisterViewSet, CelloTokenObtainPairView, CelloTokenVerifyView +from chaincode.views import ChaincodeViewSet +from channel.views import ChannelViewSet +from node.views import NodeViewSet +from organization.views import OrganizationViewSet +from user.views import UserViewSet swagger_info = openapi.Info( title="Cello API Engine Service", @@ -58,38 +42,34 @@ """, ) -SchemaView = get_schema_view( +schema_view = get_schema_view( validators=["ssv", "flex"], public=True, - permission_classes=(permissions.AllowAny,), + permission_classes=[permissions.AllowAny], ) # define and register routers of api router = DefaultRouter(trailing_slash=False) -router.register("networks", NetworkViewSet, basename="network") -router.register("agents", AgentViewSet, basename="agent") -router.register("nodes", NodeViewSet, basename="node") router.register("organizations", OrganizationViewSet, basename="organization") 
router.register("users", UserViewSet, basename="user") -router.register("files", FileViewSet, basename="file") +router.register("nodes", NodeViewSet, basename="node") router.register("register", RegisterViewSet, basename="register") router.register("channels", ChannelViewSet, basename="channel") -router.register("chaincodes", ChainCodeViewSet, basename="chaincode") - -urlpatterns = router.urls +router.register("chaincodes", ChaincodeViewSet, basename="chaincode") -urlpatterns += [ +urlpatterns = [path(WEBROOT, include(router.urls + [ path( "login", CelloTokenObtainPairView.as_view(), name="token_obtain_pair" ), - path("login/refresh/", TokenRefreshView.as_view(), name="token_refresh"), + path("login/refresh", TokenRefreshView.as_view(), name="token_refresh"), path("token-verify", CelloTokenVerifyView.as_view(), name="token_verify"), - path("docs/", SchemaView.with_ui("swagger", cache_timeout=0), name="docs"), - path("redoc/", SchemaView.with_ui("redoc", cache_timeout=0), name="redoc"), -] + path("docs", schema_view.with_ui("swagger", cache_timeout=0), name="docs"), + path("redoc", schema_view.with_ui("redoc", cache_timeout=0), name="redoc"), +]))] if DEBUG: - urlpatterns = [path(WEBROOT, include(urlpatterns))] urlpatterns += static( + settings.STATIC_URL, document_root=settings.STATIC_ROOT + ) + static( settings.MEDIA_URL, document_root=settings.MEDIA_ROOT ) diff --git a/src/agent/k8s-rest-agent/src/api/__init__.py b/src/api-engine/auth/__init__.py similarity index 100% rename from src/agent/k8s-rest-agent/src/api/__init__.py rename to src/api-engine/auth/__init__.py diff --git a/src/agent/k8s-rest-agent/src/api/admin.py b/src/api-engine/auth/admin.py similarity index 100% rename from src/agent/k8s-rest-agent/src/api/admin.py rename to src/api-engine/auth/admin.py diff --git a/src/api-engine/auth/apps.py b/src/api-engine/auth/apps.py new file mode 100644 index 000000000..836fe02b5 --- /dev/null +++ b/src/api-engine/auth/apps.py @@ -0,0 +1,6 @@ +from django.apps 
import AppConfig + + +class AuthConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'auth' diff --git a/src/agent/k8s-rest-agent/src/api/management/__init__.py b/src/api-engine/auth/migrations/__init__.py similarity index 100% rename from src/agent/k8s-rest-agent/src/api/management/__init__.py rename to src/api-engine/auth/migrations/__init__.py diff --git a/src/api-engine/auth/models.py b/src/api-engine/auth/models.py new file mode 100644 index 000000000..71a836239 --- /dev/null +++ b/src/api-engine/auth/models.py @@ -0,0 +1,3 @@ +from django.db import models + +# Create your models here. diff --git a/src/api-engine/auth/serializers.py b/src/api-engine/auth/serializers.py new file mode 100644 index 000000000..5e8832972 --- /dev/null +++ b/src/api-engine/auth/serializers.py @@ -0,0 +1,69 @@ +from typing import Dict, Any, Optional + +from rest_framework import serializers + +from common.validators import validate_host +from api.lib.pki import CryptoConfig, CryptoGen +from organization.models import Organization +from user.models import UserProfile +from user.serializers import UserInfo + + +class RegisterBody(serializers.Serializer): + org_name = serializers.CharField(help_text="User Organization Name") + email = serializers.EmailField(help_text="User Email") + password = serializers.CharField(help_text="User Password") + + class Meta: + fields = ("org_name", "email", "password") + extra_kwargs = { + "org_name": {"required": True}, + "email": {"required": True}, + "password": {"required": True}, + } + + @staticmethod + def validate_org_name(org_name: str) -> str: + if Organization.objects.filter(name=org_name).exists(): + raise serializers.ValidationError("Organization already exists!") + validate_host(org_name) + return org_name + + def create(self, validated_data: Dict[str, Any]) -> Optional[Organization]: + org_name = validated_data.get("org_name") + + CryptoConfig(org_name).create() + CryptoGen(org_name).generate() + organization = 
Organization(name=org_name) + organization.save() + + user = UserProfile( + email=validated_data["email"], + username=validated_data["email"], + role=UserProfile.Role.ADMIN, + organization=organization, + ) + + password = validated_data.get("password") + user.set_password(password) + user.save() + return organization + + +class RegisterResponse(serializers.Serializer): + id = serializers.UUIDField(help_text="Organization ID") + msg = serializers.CharField(help_text="Organization Name") + + +class LoginBody(serializers.Serializer): + email = serializers.CharField(help_text="User Email") + password = serializers.CharField(help_text="User Password") + + +class LoginSuccessBody(serializers.Serializer): + token = serializers.CharField(help_text="access token") + user = UserInfo() + + +class TokenVerifyRequest(serializers.Serializer): + token = serializers.CharField(help_text="access token") diff --git a/src/agent/k8s-rest-agent/src/api/tests.py b/src/api-engine/auth/tests.py similarity index 100% rename from src/agent/k8s-rest-agent/src/api/tests.py rename to src/api-engine/auth/tests.py diff --git a/src/api-engine/auth/views.py b/src/api-engine/auth/views.py new file mode 100644 index 000000000..c6ee725b9 --- /dev/null +++ b/src/api-engine/auth/views.py @@ -0,0 +1,105 @@ +import logging +from typing import Union + +from django.contrib.auth import authenticate +from drf_yasg.utils import swagger_auto_schema +from rest_framework import viewsets, status +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework_simplejwt.exceptions import TokenError +from rest_framework_simplejwt.tokens import AccessToken +from rest_framework_simplejwt.views import TokenObtainPairView, TokenVerifyView + +from api.common import err, ok +from api.common.response import make_response_serializer +from api.utils.common import with_common_response +from auth.serializers import RegisterBody, RegisterResponse, LoginBody, LoginSuccessBody, 
TokenVerifyRequest +from user.models import UserProfile +from user.serializers import UserInfo + +LOG = logging.getLogger(__name__) + + +class RegisterViewSet(viewsets.ViewSet): + @swagger_auto_schema( + operation_summary="Create an organization and Register its first administrator", + request_body=RegisterBody, + responses=with_common_response( + {status.HTTP_201_CREATED: make_response_serializer(RegisterResponse)} + ), + ) + def create(self, request: Request) -> Response: + serializer = RegisterBody(data=request.data) + serializer.is_valid(raise_exception=True) + + organization = serializer.save() + response = RegisterResponse(data={ + "id": organization.id, + "msg": organization.name + }) + response.is_valid(raise_exception=True) + return Response( + data=ok(response.data), + status=status.HTTP_201_CREATED, + ) + + +class CelloTokenObtainPairView(TokenObtainPairView): + @swagger_auto_schema( + operation_summary="User Login", + request_body=LoginBody, + responses=with_common_response( + {status.HTTP_200_OK: make_response_serializer(LoginSuccessBody)} + ), + ) + def post(self, request: Request, *args, **kwargs): + serializer = LoginBody(data=request.data) + serializer.is_valid(raise_exception=True) + user = authenticate( + request, + username=serializer.validated_data["email"], + password=serializer.validated_data["password"], + ) + if user is None: + return Response( + status=status.HTTP_401_UNAUTHORIZED, + ) + + return Response( + data=ok(LoginSuccessBody({ + "token": str(AccessToken.for_user(user)), + "user": UserInfo(user).data, + }).data), + status=status.HTTP_200_OK, + ) + + +class CelloTokenVerifyView(TokenVerifyView): + @swagger_auto_schema( + operation_summary="Verify User Token", + request_body=TokenVerifyRequest, + responses=with_common_response( + {status.HTTP_200_OK: LoginSuccessBody} + ), + ) + def post(self, request, *args, **kwargs): + serializer = TokenVerifyRequest(data=request.data) + serializer.is_valid(raise_exception=True) + try: + 
access_token = AccessToken( + token=serializer.validated_data["token"], + ) + user = UserProfile.objects.get(pk=access_token["user_id"]) + except (TokenError, UserProfile.DoesNotExist): + LOG.exception("invalid token error") + return Response( + data=err(msg="invalid token"), + status=status.HTTP_400_BAD_REQUEST) + + return Response( + data=ok(LoginSuccessBody({ + "token": str(access_token.token), + "user": UserInfo(user).data, + }).data), + status=status.HTTP_200_OK, + ) diff --git a/template/node/core.yaml.bak b/src/api-engine/cello/node/core.yaml.bak similarity index 99% rename from template/node/core.yaml.bak rename to src/api-engine/cello/node/core.yaml.bak index 7809b001d..059379357 100644 --- a/template/node/core.yaml.bak +++ b/src/api-engine/cello/node/core.yaml.bak @@ -797,4 +797,4 @@ metrics: writeInterval: 10s # prefix is prepended to all emitted statsd metrics - prefix: + prefix: \ No newline at end of file diff --git a/template/node/orderer.yaml.bak b/src/api-engine/cello/node/orderer.yaml.bak similarity index 100% rename from template/node/orderer.yaml.bak rename to src/api-engine/cello/node/orderer.yaml.bak diff --git a/src/agent/k8s-rest-agent/src/api/management/commands/__init__.py b/src/api-engine/chaincode/__init__.py similarity index 100% rename from src/agent/k8s-rest-agent/src/api/management/commands/__init__.py rename to src/api-engine/chaincode/__init__.py diff --git a/src/api-engine/api/admin.py b/src/api-engine/chaincode/admin.py similarity index 60% rename from src/api-engine/api/admin.py rename to src/api-engine/chaincode/admin.py index 51fb5d189..8c38f3f3d 100644 --- a/src/api-engine/api/admin.py +++ b/src/api-engine/chaincode/admin.py @@ -1,6 +1,3 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# from django.contrib import admin # Register your models here. 
diff --git a/src/api-engine/chaincode/application-gateway/go.mod b/src/api-engine/chaincode/application-gateway/go.mod new file mode 100644 index 000000000..448402d60 --- /dev/null +++ b/src/api-engine/chaincode/application-gateway/go.mod @@ -0,0 +1,18 @@ +module assetTransfer + +go 1.24.0 + +require ( + github.com/hyperledger/fabric-gateway v1.10.0 + google.golang.org/grpc v1.76.0 +) + +require ( + github.com/hyperledger/fabric-protos-go-apiv2 v0.3.7 // indirect + github.com/miekg/pkcs11 v1.1.1 // indirect + golang.org/x/net v0.42.0 // indirect + golang.org/x/sys v0.35.0 // indirect + golang.org/x/text v0.28.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250804133106-a7a43d27e69b // indirect + google.golang.org/protobuf v1.36.10 // indirect +) diff --git a/src/api-engine/chaincode/application-gateway/go.sum b/src/api-engine/chaincode/application-gateway/go.sum new file mode 100644 index 000000000..abb099b63 --- /dev/null +++ b/src/api-engine/chaincode/application-gateway/go.sum @@ -0,0 +1,52 @@ +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod 
h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/hyperledger/fabric-gateway v1.10.0 h1:x5z/pofdVYIqgMo9QWejubfAZYCSt94WdUPj4Wipdeg= +github.com/hyperledger/fabric-gateway v1.10.0/go.mod h1:fSFS1vQkPZq6inNvzsnI/7PCaKSU+UZOZ6uAuau0Yq0= +github.com/hyperledger/fabric-protos-go-apiv2 v0.3.7 h1:sQ5qv8vQQfwewa1JlCiSCC8dLElmaU2/frLolpgibEY= +github.com/hyperledger/fabric-protos-go-apiv2 v0.3.7/go.mod h1:bJnwzfv03oZQeCc863pdGTDgf5nmCy6Za3RAE7d2XsQ= +github.com/miekg/pkcs11 v1.1.1 h1:Ugu9pdy6vAYku5DEpVWVFPYnzV+bxB+iRdbuFSu7TvU= +github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ= +go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I= +go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE= +go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E= +go.opentelemetry.io/otel/sdk v1.37.0 h1:ItB0QUqnjesGRvNcmAcU0LyvkVyGJ2xftD29bWdDvKI= +go.opentelemetry.io/otel/sdk v1.37.0/go.mod h1:VredYzxUvuo2q3WRcDnKDjbdvmO0sCzOvVAiY+yUkAg= +go.opentelemetry.io/otel/sdk/metric v1.37.0 h1:90lI228XrB9jCMuSdA0673aubgRobVZFhbjxHHspCPc= +go.opentelemetry.io/otel/sdk/metric 
v1.37.0/go.mod h1:cNen4ZWfiD37l5NhS+Keb5RXVWZWpRE+9WyVCpbo5ps= +go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4= +go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0= +golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs= +golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8= +golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= +golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= +golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= +gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= +gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250804133106-a7a43d27e69b h1:zPKJod4w6F1+nRGDI9ubnXYhU9NSWoFAijkHkUXeTK8= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250804133106-a7a43d27e69b/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/grpc v1.76.0 h1:UnVkv1+uMLYXoIz6o7chp59WfQUYA2ex/BXQ9rHZu7A= +google.golang.org/grpc v1.76.0/go.mod h1:Ju12QI8M6iQJtbcsV+awF5a4hfJMLi4X0JLo94ULZ6c= +google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE= +google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/src/api-engine/chaincode/application-gateway/main.go b/src/api-engine/chaincode/application-gateway/main.go new file mode 100644 index 000000000..076e77a75 --- /dev/null +++ b/src/api-engine/chaincode/application-gateway/main.go @@ -0,0 +1,219 @@ +package main + +import ( + "bytes" + "crypto/x509" + "encoding/json" + "fmt" + "os" + "path" + "path/filepath" 
+ "strings" + "time" + + "github.com/hyperledger/fabric-gateway/pkg/client" + "github.com/hyperledger/fabric-gateway/pkg/hash" + "github.com/hyperledger/fabric-gateway/pkg/identity" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials" +) + +var ( + mspID string + certPath string + keyPath string + tlsCertPath string + peerEndpoint string + gatewayPeer string +) + +func main() { + /* Check and get the environment variables */ + env := checkEnvVars() + mspID = env["CORE_PEER_LOCALMSPID"] + mspConfigPath := env["CORE_PEER_MSPCONFIGPATH"] + address := env["CORE_PEER_ADDRESS"] + chaincodeName := env["CHAINCODE_NAME"] + channelName := env["CHANNEL_NAME"] + + certPath = filepath.Join(mspConfigPath, "signcerts") + keyPath = filepath.Join(mspConfigPath, "keystore") + tlsCertPath = filepath.Join(mspConfigPath, "../tls/ca.crt") + + peerEndpoint = "dns:///" + address + gatewayPeer = strings.Split(address, ":")[0] + + /* Check if the action and function are given */ + if len(os.Args) < 3 { + panic(fmt.Sprintf("Error: expected at least 2 arguments but only %d is given.", len(os.Args) - 1)) + } + + /* submit/evaluate */ + action := strings.ToLower(os.Args[1]) + /* chaincode function name */ + function := os.Args[2] + /* other arguments */ + args := os.Args[3:] + + // create a Gateway client + clientConnection := newGrpcConnection() + defer clientConnection.Close() + + id := newIdentity() + sign := newSign() + + gw, err := client.Connect( + id, + client.WithSign(sign), + client.WithHash(hash.SHA256), + client.WithClientConnection(clientConnection), + client.WithEvaluateTimeout(5*time.Second), + client.WithEndorseTimeout(15*time.Second), + client.WithSubmitTimeout(5*time.Second), + client.WithCommitStatusTimeout(1*time.Minute), + ) + if err != nil { + panic(err) + } + defer gw.Close() + + // get chaincode + network := gw.GetNetwork(channelName) + contract := network.GetContract(chaincodeName) + + // execute + switch action { + case "submit": + result, err :=
contract.SubmitTransaction(function, args...) + if err != nil { + panic(fmt.Errorf("Error: %w", err)) + } + printJSON(result) + + case "evaluate": + result, err := contract.EvaluateTransaction(function, args...) + if err != nil { + panic(fmt.Errorf("Error: %w", err)) + } + printJSON(result) + default: + panic(fmt.Sprintf("Error: invalid action %s (should be 'submit' or 'evaluate').", action)) + } +} + +func checkEnvVars() map[string]string { + requiredVars := []string{ + "CORE_PEER_LOCALMSPID", + "CORE_PEER_MSPCONFIGPATH", + "CORE_PEER_ADDRESS", + "CHAINCODE_NAME", + "CHANNEL_NAME", + } + + missing := []string{} + values := make(map[string]string) + + for _, key := range requiredVars { + val := os.Getenv(key) + if val == "" { + missing = append(missing, key) + } else { + values[key] = val + } + } + + if len(missing) > 0 { + if len(missing) == 1 { + panic(fmt.Sprintf("Missing an environment variable: %s", missing[0])) + } + panic(fmt.Sprintf("Missing environment variables: %s", strings.Join(missing, ", "))) + } + + return values +} + +func newGrpcConnection() *grpc.ClientConn { + certificatePEM, err := os.ReadFile(tlsCertPath) + if err != nil { + panic(fmt.Errorf("failed to read TLS certificate file: %w", err)) + } + + certificate, err := identity.CertificateFromPEM(certificatePEM) + if err != nil { + panic(err) + } + + certPool := x509.NewCertPool() + certPool.AddCert(certificate) + transportCredentials := credentials.NewClientTLSFromCert(certPool, gatewayPeer) + + connection, err := grpc.NewClient(peerEndpoint, grpc.WithTransportCredentials(transportCredentials)) + if err != nil { + panic(fmt.Errorf("failed to create gRPC connection: %w", err)) + } + + return connection +} + +func newIdentity() *identity.X509Identity { + certificatePEM, err := readFirstFile(certPath) + if err != nil { + panic(fmt.Errorf("failed to read certificate file: %w", err)) + } + + certificate, err := identity.CertificateFromPEM(certificatePEM) + if err != nil { + panic(err) + } + + id, err 
:= identity.NewX509Identity(mspID, certificate) + if err != nil { + panic(err) + } + + return id +} + +func newSign() identity.Sign { + privateKeyPEM, err := readFirstFile(keyPath) + if err != nil { + panic(fmt.Errorf("failed to read private key file: %w", err)) + } + + privateKey, err := identity.PrivateKeyFromPEM(privateKeyPEM) + if err != nil { + panic(err) + } + + sign, err := identity.NewPrivateKeySign(privateKey) + if err != nil { + panic(err) + } + + return sign +} + +func readFirstFile(dirPath string) ([]byte, error) { + dir, err := os.Open(dirPath) + if err != nil { + return nil, err + } + fileNames, err := dir.Readdirnames(1) + if err != nil { + return nil, err + } + return os.ReadFile(path.Join(dirPath, fileNames[0])) +} + +func printJSON(data []byte) { + if len(data) == 0 { + return + } + var pretty bytes.Buffer + if err := json.Indent(&pretty, data, "", " "); err != nil { + /* returned data is not in json */ + fmt.Println(string(data)) + return + } + fmt.Println(string(pretty.Bytes())) +} diff --git a/src/api-engine/chaincode/apps.py b/src/api-engine/chaincode/apps.py new file mode 100644 index 000000000..841d0d811 --- /dev/null +++ b/src/api-engine/chaincode/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class ChaincodeConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'chaincode' diff --git a/src/api-engine/chaincode/migrations/0001_initial.py b/src/api-engine/chaincode/migrations/0001_initial.py new file mode 100644 index 000000000..afafe5215 --- /dev/null +++ b/src/api-engine/chaincode/migrations/0001_initial.py @@ -0,0 +1,41 @@ +# Generated by Django 4.2.16 on 2025-09-28 23:48 + +import chaincode.models +import common.utils +import django.core.validators +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('channel', '0001_initial'), + ] + + operations = [ + migrations.CreateModel( + 
name='Chaincode', + fields=[ + ('id', models.UUIDField(default=common.utils.make_uuid, editable=False, help_text='Chaincode ID', primary_key=True, serialize=False, unique=True)), + ('package_id', models.CharField(editable=False, help_text='Chaincode Package ID', max_length=128, unique=True)), + ('package', models.FileField(help_text='Chaincode Package', upload_to=chaincode.models.get_package_path)), + ('name', models.CharField(help_text='Chaincode Name', max_length=128)), + ('version', models.CharField(help_text='Chaincode Version', max_length=128)), + ('sequence', models.IntegerField(help_text='Chaincode Sequence', validators=[django.core.validators.MinValueValidator(1)])), + ('label', models.CharField(help_text='Chaincode Label', max_length=128)), + ('language', models.CharField(help_text='Chaincode Language', max_length=128)), + ('init_required', models.BooleanField(default=False, help_text='Whether Chaincode Initialization Required')), + ('signature_policy', models.CharField(blank=True, help_text='Chaincode Signature Policy', null=True)), + ('status', models.CharField(choices=[('CREATED', 'Created'), ('INSTALLED', 'Installed'), ('APPROVED', 'Approved'), ('COMMITTED', 'Committed')], default='CREATED', help_text='Chaincode Status', max_length=16)), + ('description', models.CharField(blank=True, help_text='Chaincode Description', max_length=128, null=True)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Chaincode Creation Timestamp')), + ('channel', models.ForeignKey(help_text='Chaincode Channel', on_delete=django.db.models.deletion.CASCADE, related_name='chaincodes', to='channel.channel')), + ], + options={ + 'ordering': ('-created_at',), + }, + ), + ] diff --git a/src/api-engine/chaincode/migrations/0002_initial.py b/src/api-engine/chaincode/migrations/0002_initial.py new file mode 100644 index 000000000..01eabb16f --- /dev/null +++ b/src/api-engine/chaincode/migrations/0002_initial.py @@ -0,0 +1,29 @@ +# Generated by Django 4.2.16 on 
2025-09-28 23:48 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('chaincode', '0001_initial'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('node', '0001_initial'), + ] + + operations = [ + migrations.AddField( + model_name='chaincode', + name='creator', + field=models.ForeignKey(help_text='Chaincode Creator', null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='chaincode', + name='peers', + field=models.ManyToManyField(help_text='Chaincode Installed Peers', to='node.node'), + ), + ] diff --git a/src/agent/k8s-rest-agent/src/api/migrations/__init__.py b/src/api-engine/chaincode/migrations/__init__.py similarity index 100% rename from src/agent/k8s-rest-agent/src/api/migrations/__init__.py rename to src/api-engine/chaincode/migrations/__init__.py diff --git a/src/api-engine/chaincode/models.py b/src/api-engine/chaincode/models.py new file mode 100644 index 000000000..a38a217ea --- /dev/null +++ b/src/api-engine/chaincode/models.py @@ -0,0 +1,104 @@ +import os.path + +from django.core.validators import MinValueValidator +from django.db import models + +from channel.models import Channel +from common.utils import make_uuid +from node.models import Node +from user.models import UserProfile + + +def get_package_path(instance, filename) -> str: + return str(os.path.join(instance.channel.name, filename)) +# Create your models here. 
+ + +class Chaincode(models.Model): + class Status(models.TextChoices): + CREATED = "CREATED", "Created" + INSTALLED = "INSTALLED", "Installed" + APPROVED = "APPROVED", "Approved" + COMMITTED = "COMMITTED", "Committed" + + id = models.UUIDField( + primary_key=True, + help_text="Chaincode ID", + default=make_uuid, + editable=False, + unique=True, + ) + package_id = models.CharField( + help_text="Chaincode Package ID", + max_length=128, + editable=False, + unique=True, + ) + package = models.FileField( + help_text="Chaincode Package", + upload_to=get_package_path, + ) + name = models.CharField( + help_text="Chaincode Name", + max_length=128, + ) + version = models.CharField( + help_text="Chaincode Version", + max_length=128, + ) + sequence = models.IntegerField( + help_text="Chaincode Sequence", + validators=[MinValueValidator(1)], + ) + label = models.CharField( + help_text="Chaincode Label", + max_length=128, + ) + creator = models.ForeignKey( + UserProfile, + help_text="Chaincode Creator", + on_delete=models.SET_NULL, + null=True, + ) + channel = models.ForeignKey( + Channel, + help_text="Chaincode Channel", + on_delete=models.CASCADE, + related_name="chaincodes", + ) + language = models.CharField( + help_text="Chaincode Language", + max_length=128, + ) + init_required = models.BooleanField( + help_text="Whether Chaincode Initialization Required", + default=False, + ) + signature_policy = models.CharField( + help_text="Chaincode Signature Policy", + null=True, + blank=True, + ) + status = models.CharField( + help_text="Chaincode Status", + choices=Status.choices, + default=Status.CREATED, + max_length=16, + ) + peers = models.ManyToManyField( + to=Node, + help_text="Chaincode Installed Peers", + ) + description = models.CharField( + help_text="Chaincode Description", + max_length=128, + blank=True, + null=True, + ) + created_at = models.DateTimeField( + help_text="Chaincode Creation Timestamp", + auto_now_add=True, + ) + + class Meta: + ordering = ("-created_at",) 
diff --git a/src/api-engine/chaincode/serializers.py b/src/api-engine/chaincode/serializers.py new file mode 100644 index 000000000..918aa2a1e --- /dev/null +++ b/src/api-engine/chaincode/serializers.py @@ -0,0 +1,157 @@ +import tarfile +from typing import List, Dict, Any + +from django.core.validators import MinValueValidator +from rest_framework import serializers +from chaincode.models import Chaincode +from chaincode.service import ChaincodeAction, create_chaincode, get_chaincode, get_metadata, install_chaincode, approve_chaincode, commit_chaincode, send_chaincode_request +from channel.models import Channel +from channel.serializers import ChannelID +from common.serializers import ListResponseSerializer +from node.models import Node +from user.serializers import UserID + + +class ChaincodeID(serializers.ModelSerializer): + class Meta: + model = Chaincode + fields = ("id",) + + def create(self, validated_data: Dict[str, Any]) -> Chaincode: + return get_chaincode(validated_data["id"]) + + +class ChaincodeResponse(ChaincodeID): + channel = ChannelID() + creator = UserID() + + class Meta: + model = Chaincode + fields = ( + "id", + "package_id", + "label", + "creator", + "channel", + "language", + "status", + "created_at", + "description", + ) + + +class ChaincodeList(ListResponseSerializer): + data = ChaincodeResponse(many=True, help_text="Chaincode data") + + +class ChaincodeCreateBody(serializers.ModelSerializer): + peers = serializers.PrimaryKeyRelatedField( + many=True, + queryset=Node.objects.filter(type=Node.Type.PEER), + help_text="Chaincode Peers" + ) + + class Meta: + model = Chaincode + fields = ( + "name", + "version", + "sequence", + "init_required", + "signature_policy", + "package", + "channel", + "peers", + "description", + ) + extra_kwargs = { + "sequence": { + "validators": [MinValueValidator(1)] + }, + "init_required": {"required": False}, + "signature_policy": {"required": False}, + } + + @staticmethod + def validate_package(value): + if not 
value.name.endswith(".tar.gz"): + raise serializers.ValidationError("Chaincode Package must be a '.tar.gz' file.") + + if value.content_type != "application/gzip": + raise serializers.ValidationError( + "Chaincode Package is not a 'application/gzip' file but {} instead." + .format(value.content_type) + ) + + try: + metadata = get_metadata(value) + if metadata is None: + raise serializers.ValidationError("Metadata not found.") + except tarfile.TarError: + raise serializers.ValidationError("Failed to open the chaincode tar package.") + + return value + + def validate_channel(self, value: Channel): + if not value.organizations.contains(self.context["organization"]): + raise serializers.ValidationError("You can only install chaincodes on your organization.") + return value + + def validate_peers(self, value: List[Node]): + for node in value: + if Node.Type.PEER != node.type: + raise serializers.ValidationError( + "Node {} is not a peer but a/an {} instead.".format(node.id, node.type) + ) + if node.organization != self.context["organization"]: + raise serializers.ValidationError( + "Node {} does not belong to your organization.".format(node.id) + ) + return value + + def create(self, validated_data: Dict[str, Any]) -> ChaincodeID: + validated_data["user"] = self.context["user"] + validated_data["organization"] = self.context["organization"] + return ChaincodeID({"id": create_chaincode(**validated_data).id}) + + +class ChaincodeInstallBody(ChaincodeID): + def create(self, validated_data: Dict[str, Any]): + install_chaincode( + self.context["organization"], + super().create(validated_data) + ) + + +class ChaincodeApproveBody(ChaincodeID): + def create(self, validated_data: Dict[str, Any]): + approve_chaincode( + self.context["organization"], + super().create(validated_data) + ) + + +class ChaincodeCommitBody(ChaincodeID): + def create(self, validated_data: Dict[str, Any]): + commit_chaincode( + self.context["organization"], + super().create(validated_data) + ) + + +class 
ChaincodeRequestBody(ChaincodeID): + action = serializers.ChoiceField(choices=[(tag.name, tag.name) for tag in ChaincodeAction]) + function = serializers.CharField() + args = serializers.ListField( + child=serializers.CharField(), + allow_empty=True + ) + + def create(self, validated_data: Dict[str, Any]): + send_chaincode_request( + self.context["organization"], + super().create(validated_data), + validated_data["action"], + validated_data["function"], + validated_data["args"] + ) diff --git a/src/api-engine/chaincode/service.py b/src/api-engine/chaincode/service.py new file mode 100644 index 000000000..7c90ffa81 --- /dev/null +++ b/src/api-engine/chaincode/service.py @@ -0,0 +1,299 @@ +from enum import Enum, auto +import json +import logging +import os +import subprocess +import tarfile +from typing import Optional, List, Any, Dict, Tuple + +from django.db import transaction + +from api_engine.settings import CELLO_HOME, FABRIC_TOOL +from chaincode.models import Chaincode +from channel.models import Channel +from node.models import Node +from node.service import get_domain_name, get_peer_directory, get_org_directory, get_orderer_directory +from organization.models import Organization +from user.models import UserProfile + +LOG = logging.getLogger(__name__) + +peer_command = os.path.join(FABRIC_TOOL, "peer") + + +def get_chaincode(id: str) -> Optional[Chaincode]: + return Chaincode.objects.get(id=id) + + +def create_chaincode( + name: str, + version: str, + sequence: int, + package, + channel: Channel, + user: UserProfile, + organization: Organization, + peers: List[Node], + description: str, + init_required: bool = False, + signature_policy: str = None) -> Chaincode: + metadata = get_metadata(package) + + chaincode = Chaincode( + name=name, + version=version, + sequence=sequence, + label=metadata["label"], + language=metadata["type"], + package=package, + init_required=init_required, + signature_policy=signature_policy, + channel=channel, + creator=user, + 
description=description, + ) + chaincode.peers.add(*peers) + + peer_envs = get_peers_root_certs_and_addresses_and_envs( + organization.name, + peers + )[2] + + _set_chaincode_package_id(peer_envs[0], chaincode) + _install_chaincode_with_envs(peer_envs, chaincode) + _approve_chaincode_with_envs(peer_envs[0], organization, chaincode) + return chaincode + + +def get_metadata(file) -> Optional[Dict[str, Any]]: + file.seek(0) + res = None + with tarfile.open(fileobj=file, mode='r:gz') as tar: + for member in tar.getmembers(): + if member.name.endswith("metadata.json"): + res = json.loads( + tar.extractfile(member) + .read() + .decode("utf-8") + ) + break + file.seek(0) + return res + + +def install_chaincode(organization: Organization, chaincode: Chaincode) -> None: + peer_envs: List[Dict[str, str]] = get_peers_root_certs_and_addresses_and_envs( + organization.name, + chaincode.peers + )[2] + + _install_chaincode_with_envs(peer_envs, chaincode) + + +def _set_chaincode_package_id(peer_env: Dict[str, str], chaincode: Chaincode) -> None: + command: List[str] = [ + peer_command, + "lifecycle", + "chaincode", + "calculatepackageid", + chaincode.package.path + ] + LOG.info(" ".join(command)) + with transaction.atomic(): + chaincode.package_id = subprocess.run( + command, + env=peer_env, + check=True, + capture_output=True, + text=True + ).stdout + chaincode.save() + + +def _install_chaincode_with_envs(peer_envs: List[Dict[str, str]], chaincode: Chaincode) -> None: + command = [ + peer_command, + "lifecycle", + "chaincode", + "install", + chaincode.package.path, + ] + LOG.info(" ".join(command)) + for peer_env in peer_envs: + subprocess.run( + command, + env=peer_env, + check=True) + + +def approve_chaincode( + organization: Organization, + chaincode: Chaincode) -> None: + _approve_chaincode_with_envs( + get_peers_root_certs_and_addresses_and_envs( + organization.name, + [chaincode.peers[0]] # type: ignore + )[2][0], + organization, + chaincode + ) + + +def 
_approve_chaincode_with_envs( + peer_env: Dict[str, str], + organization: Organization, + chaincode: Chaincode) -> None: + # Chaincode is approved at the organization level, + # so the command only needs to target one peer. + orderer_domain_name = get_domain_name( + organization.name, + Node.Type.ORDERER, + Node.objects.filter(type=Node.Type.ORDERER, organization=organization).first().name + ) + command = [ + peer_command, + "lifecycle", + "chaincode", + "approveformyorg", + "-o", + "{}:7050".format(orderer_domain_name), + "--ordererTLSHostnameOverride", + orderer_domain_name, + "--channelID", + chaincode.channel.name, + "--name", + chaincode.name, + "--version", + chaincode.version, + "--package-id", + chaincode.package_id, + "--sequence", + str(chaincode.sequence), + "--tls", + "--cafile", + "{}/msp/tlscacerts/tlsca.{}-cert.pem".format( + get_orderer_directory(organization.name, orderer_domain_name), + organization.name.split(".", 1)[1], + ) + ] + if chaincode.init_required: + command.append("--init-required") + if chaincode.signature_policy and chaincode.signature_policy.strip(): + command.extend(["--signature-policy", chaincode.signature_policy]) + + LOG.info(" ".join(command)) + subprocess.run( + command, + env=peer_env, + check=True) + + +def commit_chaincode( + organization: Organization, + chaincode: Chaincode) -> None: + peer_root_certs, peer_addresses, peer_envs = get_peers_root_certs_and_addresses_and_envs( + organization.name, + chaincode.peers + ) + orderer_domain_name = get_domain_name( + organization.name, + Node.Type.ORDERER, + Node.objects.filter(type=Node.Type.ORDERER, organization=organization).first().name + ) + command = [ + peer_command, + "lifecycle", + "chaincode", + "commit", + "-o", + "{}:7050".format(orderer_domain_name), + "--ordererTLSHostnameOverride", + orderer_domain_name, + "--channelID", + chaincode.channel.name, + "--name", + chaincode.name, + "--version", + chaincode.version, + "--sequence", + str(chaincode.sequence), + "--tls", 
+ "--cafile", + "{}/msp/tlscacerts/tlsca.{}-cert.pem".format( + get_orderer_directory(organization.name, orderer_domain_name), + organization.name.split(".", 1)[1], + ) + ] + for i in range(len(chaincode.peers)): + command.extend(["--peerAddresses", peer_addresses[i], "--tlsRootCertFiles", peer_root_certs[i]]) + + LOG.info(" ".join(command)) + subprocess.run( + command, + env=peer_envs[0], + check=True) + + +class ChaincodeAction(Enum): + SUBMIT = auto() + EVALUATE = auto() + + +def send_chaincode_request( + organization: Organization, + chaincode: Chaincode, + action: ChaincodeAction, + function: str, + *args: str): + # Pick any organization peer + peer_env: Dict[str, str] = get_peers_root_certs_and_addresses_and_envs( + organization.name, + [chaincode.peers.filter(organization=organization)[0]] # type: ignore + )[2][0] + command = [ + "go", + "run", + os.path.join(CELLO_HOME, "chaincode", "application-gateway", "main.go"), + action.name, + function, + *args + ] + LOG.info(" ".join(command)) + response: str = subprocess.run( + command, + env={ + **peer_env, + "CHANNEL_NAME": chaincode.channel.name, + "CHAINCODE_NAME": chaincode.name + }, + check=True, + capture_output=True, + text=True).stdout + LOG.info(response) + + +def get_peers_root_certs_and_addresses_and_envs( + organization_name: str, + peers: List[Node]) -> Tuple[List[str], List[str], List[Dict[str, str]]]: + peer_root_certs: List[str] = [] + peer_addresses: List[str] = [] + peer_envs: List[Dict[str, str]] = [] + for peer_name in [peer.name for peer in peers]: + peer_domain_name: str = get_domain_name(organization_name, Node.Type.PEER, peer_name) + peer_dir: str = get_peer_directory(organization_name, peer_domain_name) + peer_root_cert: str = os.path.join(peer_dir, "tls/ca.crt") + peer_address: str = "{}:7051".format(peer_domain_name) + peer_root_certs.append(peer_root_cert) + peer_addresses.append(peer_address) + peer_envs.append({ + "CORE_PEER_TLS_ENABLED": "true", + "CORE_PEER_LOCALMSPID": 
"{}MSP".format(organization_name.split(".", 1)[0].capitalize()), + "CORE_PEER_TLS_ROOTCERT_FILE": peer_root_cert, + "CORE_PEER_MSPCONFIGPATH": "{}/users/Admin@{}/msp".format( + get_org_directory(organization_name, Node.Type.PEER), + organization_name + ), + "CORE_PEER_ADDRESS": peer_address, + "FABRIC_CFG_PATH": peer_dir, + }) + return peer_root_certs, peer_addresses, peer_envs diff --git a/src/api-engine/api/tests.py b/src/api-engine/chaincode/tests.py similarity index 58% rename from src/api-engine/api/tests.py rename to src/api-engine/chaincode/tests.py index 557879540..7ce503c2d 100644 --- a/src/api-engine/api/tests.py +++ b/src/api-engine/chaincode/tests.py @@ -1,6 +1,3 @@ -# -# SPDX-License-Identifier: Apache-2.0 -# from django.test import TestCase # Create your tests here. diff --git a/src/api-engine/chaincode/views.py b/src/api-engine/chaincode/views.py new file mode 100644 index 000000000..60b615c03 --- /dev/null +++ b/src/api-engine/chaincode/views.py @@ -0,0 +1,140 @@ +from drf_yasg import openapi +from drf_yasg.utils import swagger_auto_schema +from rest_framework import viewsets, status +from rest_framework.decorators import action +from rest_framework.parsers import FileUploadParser, JSONParser, FormParser, MultiPartParser +from rest_framework.permissions import IsAuthenticated +from rest_framework.response import Response + +from api.common.response import make_response_serializer +from chaincode.models import Chaincode +from chaincode.serializers import ChaincodeCommitBody, ChaincodeList, ChaincodeCreateBody, ChaincodeID, ChaincodeRequestBody, ChaincodeResponse, \ + ChaincodeInstallBody, ChaincodeApproveBody +from common.responses import with_common_response, ok +from common.serializers import PageQuerySerializer + + +# Create your views here. 
+class ChaincodeViewSet(viewsets.ViewSet): + permission_classes = [ + IsAuthenticated, + ] + + def get_parsers(self): + if getattr(self, 'action', None) == "create" or getattr(getattr(self, 'request', None), "FILES", None) is not None: + return [MultiPartParser] + return [JSONParser] + + @swagger_auto_schema( + operation_summary="List all chaincodes of the current organization", + query_serializer=PageQuerySerializer(), + responses=with_common_response( + {status.HTTP_200_OK: make_response_serializer(ChaincodeList)} + ), + ) + def list(self, request): + serializer = PageQuerySerializer(data=request.GET) + p = serializer.get_paginator( + Chaincode.objects.filter(channel__organizations__id__contains=request.user.organization.id), + ) + return Response( + status=status.HTTP_200_OK, + data=ok(ChaincodeList({ + "total": p.count, + "data": ChaincodeResponse( + p.get_page(serializer.data["page"]) + .object_list, + many=True + ).data, + }).data), + ) + + @swagger_auto_schema( + operation_summary="Create (Install and Approve) a chaincode for the current organization", + request_body=ChaincodeCreateBody(), + responses=with_common_response( + {status.HTTP_201_CREATED: make_response_serializer(ChaincodeID)} + ), + ) + def create(self, request): + serializer = ChaincodeCreateBody(data=request.data, context={ + "user": request.user, + "organization": request.user.organization, + }) + serializer.is_valid(raise_exception=True) + return Response( + status=status.HTTP_201_CREATED, + data=ok(serializer.save().data) + ) + + @swagger_auto_schema( + operation_summary="Install a chaincode for the current organization", + responses=with_common_response( + {status.HTTP_204_NO_CONTENT: None} + ), + ) + @action(detail=True, methods=["PUT"]) + def install(self, request, pk=None): + serializer = ChaincodeInstallBody( + data={ + "id": pk + }, + context={"organization": request.user.organization}) + serializer.save() + return Response( + status=status.HTTP_204_NO_CONTENT, + ) + + 
@swagger_auto_schema( + operation_summary="Approve a chaincode for the current organization", + responses=with_common_response( + {status.HTTP_204_NO_CONTENT: None} + ), + ) + @action(detail=True, methods=["PUT"]) + def approve(self, request, pk=None): + serializer = ChaincodeApproveBody( + data={ + "id": pk + }, + context={"organization": request.user.organization}) + serializer.save() + return Response( + status=status.HTTP_204_NO_CONTENT, + ) + + @swagger_auto_schema( + operation_summary="Commit a chaincode to its channel", + responses=with_common_response( + {status.HTTP_204_NO_CONTENT: None} + ), + ) + @action(detail=True, methods=["PUT"]) + def commit(self, request, pk=None): + serializer = ChaincodeCommitBody( + data={ + "id": pk + }, + context={"organization": request.user.organization}) + serializer.save() + return Response( + status=status.HTTP_204_NO_CONTENT, + ) + + @swagger_auto_schema( + operation_summary="Invoke/Query a chaincode for the current organization", + responses=with_common_response( + {status.HTTP_204_NO_CONTENT: None} + ), + ) + @action(detail=True, methods=["PUT"]) + def transact(self, request, pk=None): + serializer = ChaincodeRequestBody( + data={ + "id": pk + }, + context={"organization": request.user.organization}) + serializer.save() + return Response( + status=status.HTTP_204_NO_CONTENT, + ) diff --git a/src/agent/k8s-rest-agent/src/api/routes/__init__.py b/src/api-engine/channel/__init__.py similarity index 100% rename from src/agent/k8s-rest-agent/src/api/routes/__init__.py rename to src/api-engine/channel/__init__.py diff --git a/src/api-engine/channel/admin.py b/src/api-engine/channel/admin.py new file mode 100644 index 000000000..8c38f3f3d --- /dev/null +++ b/src/api-engine/channel/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. 
diff --git a/src/api-engine/channel/apps.py b/src/api-engine/channel/apps.py new file mode 100644 index 000000000..af966978f --- /dev/null +++ b/src/api-engine/channel/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class ChannelConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'channel' diff --git a/src/api-engine/channel/migrations/0001_initial.py b/src/api-engine/channel/migrations/0001_initial.py new file mode 100644 index 000000000..3e8e5d5b0 --- /dev/null +++ b/src/api-engine/channel/migrations/0001_initial.py @@ -0,0 +1,30 @@ +# Generated by Django 4.2.16 on 2025-09-28 23:48 + +import common.utils +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('organization', '0001_initial'), + ('node', '0001_initial'), + ] + + operations = [ + migrations.CreateModel( + name='Channel', + fields=[ + ('id', models.UUIDField(default=common.utils.make_uuid, editable=False, help_text='Channel ID', primary_key=True, serialize=False, unique=True)), + ('name', models.CharField(help_text='Channel Name', max_length=128)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Channel Creation Timestamp')), + ('orderers', models.ManyToManyField(help_text='Channel Orderers', to='node.node')), + ('organizations', models.ManyToManyField(help_text='Channel Organizations', related_name='channels', to='organization.organization')), + ], + options={ + 'ordering': ('-created_at',), + }, + ), + ] diff --git a/src/agent/k8s-rest-agent/src/api/routes/hello/__init__.py b/src/api-engine/channel/migrations/__init__.py similarity index 100% rename from src/agent/k8s-rest-agent/src/api/routes/hello/__init__.py rename to src/api-engine/channel/migrations/__init__.py diff --git a/src/api-engine/channel/models.py b/src/api-engine/channel/models.py new file mode 100644 index 000000000..031cf8449 --- /dev/null +++ b/src/api-engine/channel/models.py @@ -0,0 +1,32 @@ 
+from django.db import models + +from common.utils import make_uuid +from node.models import Node +from organization.models import Organization + + +class Channel(models.Model): + id = models.UUIDField( + primary_key=True, + help_text="Channel ID", + default=make_uuid, + editable=False, + unique=True, + ) + name = models.CharField(help_text="Channel Name", max_length=128) + organizations = models.ManyToManyField( + to=Organization, + help_text="Channel Organizations", + related_name="channels", + # on_delete=models.SET_NULL + ) + created_at = models.DateTimeField( + help_text="Channel Creation Timestamp", auto_now_add=True + ) + orderers = models.ManyToManyField( + to=Node, + help_text="Channel Orderers", + ) + + class Meta: + ordering = ("-created_at",) diff --git a/src/api-engine/channel/serializers.py b/src/api-engine/channel/serializers.py new file mode 100644 index 000000000..149771e76 --- /dev/null +++ b/src/api-engine/channel/serializers.py @@ -0,0 +1,84 @@ +from typing import Dict, Any + +from rest_framework import serializers + +from channel.models import Channel +from channel.service import create +from common.serializers import ListResponseSerializer +from node.models import Node +from node.service import get_node +from organization.serializeres import OrganizationID + + +class ChannelID(serializers.ModelSerializer): + class Meta: + model = Channel + fields = ("id",) + + +class ChannelResponse(serializers.ModelSerializer): + organizations = OrganizationID(many=True) + + class Meta: + model = Channel + fields = ( + "id", + "name", + "organizations", + "created_at" + ) + + +class ChannelList(ListResponseSerializer): + data = ChannelResponse(many=True, help_text="Channel data") + + +class ChannelCreateBody(serializers.Serializer): + name = serializers.CharField(max_length=128, required=True) + peer_ids = serializers.ListField( + child=serializers.UUIDField(help_text="ID of Peer Nodes") + ) + orderer_ids = serializers.ListField( + 
child=serializers.UUIDField(help_text="ID of Orderer Nodes") + ) + + @staticmethod + def validate_peer_ids(value): + if len(value) < 1: + raise serializers.ValidationError("You must specify at least one peer for a channel.") + + for peer_id in value: + node = get_node(peer_id) + if node is None: + raise serializers.ValidationError("Peer {} not found.".format(peer_id)) + if node.type != Node.Type.PEER: + raise serializers.ValidationError( + "Node {} is not a peer but {} instead.".format(peer_id, node.type)) + if node.status != Node.Status.RUNNING: + raise serializers.ValidationError("Peer {} is not running.".format(peer_id)) + + return value + + @staticmethod + def validate_orderer_ids(value): + if len(value) < 1: + raise serializers.ValidationError("You must specify at least one orderer for a channel.") + + for orderer_id in value: + node = get_node(orderer_id) + if node is None: + raise serializers.ValidationError("Orderer {} not found.".format(orderer_id)) + if node.type != Node.Type.ORDERER: + raise serializers.ValidationError( + "Node {} is not an orderer but {} instead.".format(orderer_id, node.type)) + if node.status != Node.Status.RUNNING: + raise serializers.ValidationError("Orderer {} is not running.".format(orderer_id)) + + return value + + def create(self, validated_data: Dict[str, Any]) -> ChannelID: + return ChannelID(create( + self.context["organization"], + validated_data["name"], + validated_data["peer_ids"], + validated_data["orderer_ids"])) diff --git a/src/api-engine/channel/service.py b/src/api-engine/channel/service.py new file mode 100644 index 000000000..c75937cf9 --- /dev/null +++ b/src/api-engine/channel/service.py @@ -0,0 +1,396 @@ +import json +import logging +import os +import subprocess +import time +from copy import deepcopy +from typing import List + +import yaml + +from api.exceptions import NoResource +from api_engine.settings import CELLO_HOME, FABRIC_TOOL +from channel.models import Channel +from node.models import Node +from 
node.service import get_org_directory, get_domain_name, get_orderer_directory, get_peer_directory +from organization.models import Organization + +LOG = logging.getLogger(__name__) + + +def create( + channel_organization: Organization, + channel_name: str, + channel_peer_ids: List[str], + channel_orderer_ids: List[str]) -> Channel: + channel_peers = list(Node.objects.filter(id__in=channel_peer_ids)) + channel_orderers = list(Node.objects.filter(id__in=channel_orderer_ids)) + validate_nodes(channel_peers + channel_orderers) + + orderer_msp = "OrdererMSP" + orderer_domain_names = [get_domain_name( + channel_organization.name, + Node.Type.ORDERER, + orderer.name) for orderer in channel_orderers] + orderer_addresses = ["{}:7050".format(orderer_domain_name) for orderer_domain_name in orderer_domain_names] + consenters = [{ + "Host": orderer_domain_name, + "Port": 7050, + "ClientTLSCert": "{}/tls/server.crt".format(get_orderer_directory( + channel_organization.name, + orderer_domain_name)), + "ServerTLSCert": "{}/tls/server.crt".format(get_orderer_directory( + channel_organization.name, + orderer_domain_name)), + } for orderer_domain_name in orderer_domain_names] + orderer_organization = { + "Name": "Orderer", + "ID": orderer_msp, + "MSPDir": "{}/msp".format(get_org_directory(channel_organization.name, Node.Type.ORDERER)), + "Policies": { + "Readers": { + "Type": "Signature", + "Rule": "OR('{}.member')".format(orderer_msp), + }, + "Writers": { + "Type": "Signature", + "Rule": "OR('{}.member')".format(orderer_msp), + }, + "Admins": { + "Type": "Signature", + "Rule": "OR('{}.admin')".format(orderer_msp), + }, + }, + "OrdererEndpoints": orderer_addresses, + } + + peer_organization_name = channel_organization.name.split(".", 1)[0].capitalize() + peer_msp = "{}MSP".format(peer_organization_name) + peer_organization = { + "Name": peer_organization_name, + "ID": peer_msp, + "MSPDir": "{}/msp".format(get_org_directory(channel_organization.name, Node.Type.PEER)), + "Policies": { 
+ "Readers": { + "Type": "Signature", + "Rule": "OR('{}.admin', '{}.peer', '{}.client')".format(peer_msp, peer_msp, peer_msp), + }, + "Writers": { + "Type": "Signature", + "Rule": "OR('{}.admin', '{}.client')".format(peer_msp, peer_msp), + }, + "Admins": { + "Type": "Signature", + "Rule": "OR('{}.admin')".format(peer_msp), + }, + "Endorsement": { + "Type": "Signature", + "Rule": "OR('{}.peer')".format(peer_msp), + } + } + } + + with open(os.path.join(CELLO_HOME, "config", "configtx.yaml"), "r", encoding="utf-8") as f: + template = yaml.load(f, Loader=yaml.FullLoader) + + application = deepcopy(template["Application"]) + application["Capabilities"] = template["Capabilities"]["Application"] + + orderer = deepcopy(template["Orderer"]) + orderer["Addresses"] = orderer_addresses + orderer["Capabilities"] = template["Capabilities"]["Orderer"] + orderer["OrdererType"] = "etcdraft" + orderer["EtcdRaft"]["Consenters"] = consenters + + channel = deepcopy(template["Channel"]) + channel["Capabilities"] = template["Capabilities"]["Channel"] + + profiles = {channel_name: deepcopy(channel)} + profiles[channel_name]["Orderer"] = deepcopy(orderer) + profiles[channel_name]["Orderer"]["Capabilities"] = template["Capabilities"]["Orderer"] + profiles[channel_name]["Orderer"]["Organizations"] = orderer_organization + profiles[channel_name]["Application"] = deepcopy(application) + profiles[channel_name]["Application"]["Capabilities"] = template["Capabilities"]["Application"] + profiles[channel_name]["Application"]["Organizations"] = peer_organization + + channel_dir = os.path.join(CELLO_HOME, channel_name) + os.makedirs(channel_dir, exist_ok=True) + with open(os.path.join(channel_dir, "configtx.yaml"), "w", encoding="utf-8") as f: + yaml.dump( + { + "Organizations": [orderer_organization, peer_organization], + "Capabilities": { + "Channel": template["Capabilities"]["Channel"], + "Orderer": template["Capabilities"]["Orderer"], + "Application": template["Capabilities"]["Application"], + }, 
+ "Application": application, + "Orderer": orderer, + "Channel": channel, + "Profiles": profiles, + }, + f, + sort_keys=False) + + command = [ + os.path.join(FABRIC_TOOL, "configtxgen"), + "-configPath", + channel_dir, + "-profile", + channel_name, + "-outputBlock", + os.path.join(channel_dir, "genesis.block"), + "-channelID", + channel_name, + ] + LOG.info(" ".join(command)) + subprocess.run(command, check=True) + + orderer_domain_name = orderer_domain_names[0] + orderer_dir = get_orderer_directory(channel_organization.name, orderer_domain_name) + command = [ + os.path.join(FABRIC_TOOL, "osnadmin"), + "channel", + "join", + "--channelID", + channel_name, + "--config-block", + os.path.join(channel_dir, "genesis.block"), + "-o", + "{}:7053".format(orderer_domain_name), + "--ca-file", + "{}/msp/tlscacerts/tlsca.{}-cert.pem".format( + orderer_dir, + channel_organization.name.split(".", 1)[1], + ), + "--client-cert", + "{}/tls/server.crt".format(orderer_dir), + "--client-key", + "{}/tls/server.key".format(orderer_dir), + ] + LOG.info(" ".join(command)) + subprocess.run( + command, + check=True) + + peer_domain_names = [ + get_domain_name(channel_organization.name, Node.Type.PEER, peer.name) for peer in channel_peers + ] + for peer_domain_name in peer_domain_names: + command = [ + os.path.join(FABRIC_TOOL, "peer"), + "channel", + "join", + "-b", + os.path.join(channel_dir, "genesis.block"), + ] + LOG.info(" ".join(command)) + peer_dir = get_peer_directory(channel_organization.name, peer_domain_name) + subprocess.run( + command, + env={ + "CORE_PEER_TLS_ENABLED": "true", + "CORE_PEER_LOCALMSPID": peer_msp, + "CORE_PEER_TLS_ROOTCERT_FILE": "{}/tls/ca.crt".format(peer_dir), + "CORE_PEER_MSPCONFIGPATH": "{}/users/Admin@{}/msp".format( + get_org_directory(channel_organization.name, Node.Type.PEER), + channel_organization.name + ), + "CORE_PEER_ADDRESS": "{}:7051".format(peer_domain_name), + "FABRIC_CFG_PATH": peer_dir, + }, + check=True) + + command = [ + 
os.path.join(FABRIC_TOOL, "peer"), + "channel", + "fetch", + "config", + os.path.join(channel_dir, "config_block.pb"), + "-o", + orderer_addresses[0], + "--ordererTLSHostnameOverride", + orderer_domain_name, + "-c", + channel_name, + "--tls", + "--cafile", + "{}/msp/tlscacerts/tlsca.{}-cert.pem".format( + orderer_dir, + channel_organization.name.split(".", 1)[1], + ) + ] + LOG.info(" ".join(command)) + anchor_peer_domain_name = peer_domain_names[0] + anchor_peer_dir = get_peer_directory(channel_organization.name, anchor_peer_domain_name) + time.sleep(5) + subprocess.run( + command, + env={ + "CORE_PEER_TLS_ENABLED": "true", + "CORE_PEER_LOCALMSPID": peer_msp, + "CORE_PEER_TLS_ROOTCERT_FILE": "{}/tls/ca.crt".format(anchor_peer_dir), + "CORE_PEER_MSPCONFIGPATH": "{}/users/Admin@{}/msp".format( + get_org_directory(channel_organization.name, Node.Type.PEER), + channel_organization.name + ), + "CORE_PEER_ADDRESS": "{}:7051".format( + anchor_peer_domain_name + ), + "FABRIC_CFG_PATH": anchor_peer_dir, + }, + check=True) + + command = [ + os.path.join(FABRIC_TOOL, "configtxlator"), + "proto_decode", + "--input={}".format(os.path.join(channel_dir, "config_block.pb")), + "--type=common.Block", + "--output={}".format(os.path.join(channel_dir, "config_block.json")), + ] + LOG.info(" ".join(command)) + subprocess.run(command, check=True) + + with open(os.path.join(channel_dir, "config_block.json"), "r", encoding="utf-8") as f: + config_block = json.load(f) + + with open(os.path.join(channel_dir, "config.json"), "w", encoding="utf-8") as f: + json.dump(config_block["data"]["data"][0]["payload"]["data"]["config"], f, sort_keys=False, indent=4) + + with open(os.path.join(channel_dir, "config.json"), "r", encoding="utf-8") as f: + config = json.load(f) + + config["channel_group"]["groups"]["Application"]["groups"][peer_organization_name]["values"].update({ + "AnchorPeers": { + "mod_policy": "Admins", + "value": { + "anchor_peers": [ + { + "host": anchor_peer_domain_name, + "port": 
7051 + } + ] + }, + "version": 0, + } + }) + + with open(os.path.join(channel_dir, "modified_config.json"), "w", encoding="utf-8") as f: + json.dump(config, f, sort_keys=False, indent=4) + + command = [ + os.path.join(FABRIC_TOOL, "configtxlator"), + "proto_encode", + "--input={}".format(os.path.join(channel_dir, "config.json")), + "--type=common.Config", + "--output={}".format(os.path.join(channel_dir, "config.pb")), + ] + LOG.info(" ".join(command)) + subprocess.run(command, check=True) + + command = [ + os.path.join(FABRIC_TOOL, "configtxlator"), + "proto_encode", + "--input={}".format(os.path.join(channel_dir, "modified_config.json")), + "--type=common.Config", + "--output={}".format(os.path.join(channel_dir, "modified_config.pb")), + ] + LOG.info(" ".join(command)) + subprocess.run(command, check=True) + + command = [ + os.path.join(FABRIC_TOOL, "configtxlator"), + "compute_update", + "--original={}".format(os.path.join(channel_dir, "config.pb")), + "--updated={}".format(os.path.join(channel_dir, "modified_config.pb")), + "--channel_id={}".format(channel_name), + "--output={}".format(os.path.join(channel_dir, "config_update.pb")), + ] + LOG.info(" ".join(command)) + subprocess.run(command, check=True) + + command = [ + os.path.join(FABRIC_TOOL, "configtxlator"), + "proto_decode", + "--input={}".format(os.path.join(channel_dir, "config_update.pb")), + "--type=common.ConfigUpdate", + "--output={}".format(os.path.join(channel_dir, "config_update.json")), + ] + LOG.info(" ".join(command)) + subprocess.run(command, check=True) + + with open(os.path.join(channel_dir, "config_update.json"), "r", encoding="utf-8") as f: + config_update = json.load(f) + + with open(os.path.join(channel_dir, "config_update_in_envelope.json"), "w", encoding="utf-8") as f: + json.dump( + { + "payload": { + "header": { + "channel_header": {"channel_id": channel_name, "type": 2} + }, + "data": {"config_update": config_update}, + } + }, + f, + sort_keys=False, + indent=4 + ) + + command = [ 
+ os.path.join(FABRIC_TOOL, "configtxlator"), + "proto_encode", + "--input={}".format(os.path.join(channel_dir, "config_update_in_envelope.json")), + "--type=common.Envelope", + "--output={}".format(os.path.join(channel_dir, "config_update_in_envelope.pb")), + ] + LOG.info(" ".join(command)) + subprocess.run(command, check=True) + + command = [ + os.path.join(FABRIC_TOOL, "peer"), + "channel", + "update", + "-f", + os.path.join(channel_dir, "config_update_in_envelope.pb"), + "-c", + channel_name, + "-o", + orderer_addresses[0], + "--ordererTLSHostnameOverride", + orderer_domain_name, + "--tls", + "--cafile", + "{}/msp/tlscacerts/tlsca.{}-cert.pem".format( + orderer_dir, + channel_organization.name.split(".", 1)[1], + ) + ] + LOG.info(" ".join(command)) + subprocess.run( + command, + env={ + "CORE_PEER_TLS_ENABLED": "true", + "CORE_PEER_LOCALMSPID": peer_msp, + "CORE_PEER_TLS_ROOTCERT_FILE": "{}/tls/ca.crt".format(anchor_peer_dir), + "CORE_PEER_MSPCONFIGPATH": "{}/users/Admin@{}/msp".format( + get_org_directory(channel_organization.name, Node.Type.PEER), + channel_organization.name + ), + "CORE_PEER_ADDRESS": "{}:7051".format( + anchor_peer_domain_name + ), + "FABRIC_CFG_PATH": anchor_peer_dir, + }, + check=True) + + res = Channel.objects.create(name=channel_name) + res.organizations.add(channel_organization) + res.orderers.add(channel_orderers[0]) + return res + + +def validate_nodes(nodes: List[Node]): + for node in nodes: + if node.status != Node.Status.RUNNING: + raise NoResource("Node {} is not running".format(node.name)) diff --git a/src/api-engine/channel/tests.py b/src/api-engine/channel/tests.py new file mode 100644 index 000000000..7ce503c2d --- /dev/null +++ b/src/api-engine/channel/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. 
diff --git a/src/api-engine/channel/views.py b/src/api-engine/channel/views.py new file mode 100644 index 000000000..4a03c38d7 --- /dev/null +++ b/src/api-engine/channel/views.py @@ -0,0 +1,53 @@ +from django.core.paginator import Paginator +from drf_yasg.utils import swagger_auto_schema +from rest_framework import viewsets, status +from rest_framework.permissions import IsAuthenticated +from rest_framework.response import Response + +from api.common import ok +from api.common.response import make_response_serializer +from channel.models import Channel +from channel.serializers import ChannelList, ChannelID, ChannelResponse, ChannelCreateBody +from common.responses import with_common_response +from common.serializers import PageQuerySerializer + + +# Create your views here. + +class ChannelViewSet(viewsets.ViewSet): + permission_classes = [ + IsAuthenticated, + ] + + @swagger_auto_schema( + operation_summary="List all channels of the current organization", + query_serializer=PageQuerySerializer(), + responses=with_common_response( + {status.HTTP_200_OK: make_response_serializer(ChannelList)} + ), + ) + def list(self, request): + serializer = PageQuerySerializer(data=request.GET) + p = serializer.get_paginator(Channel.objects.filter(organizations__id__contains=request.user.organization.id)) + return Response( + status=status.HTTP_200_OK, + data=ok(ChannelList({ + "total": p.count, + "data": ChannelResponse(p.page(serializer.data["page"]).object_list, many=True).data, + }).data), + ) + + @swagger_auto_schema( + operation_summary="Create a channel of the current organization", + request_body=ChannelCreateBody(), + responses=with_common_response( + {status.HTTP_201_CREATED: make_response_serializer(ChannelID)} + ), + ) + def create(self, request): + serializer = ChannelCreateBody(data=request.data, context={"organization": request.user.organization}) + serializer.is_valid(raise_exception=True) + return Response( + status=status.HTTP_201_CREATED, + 
class ExtraEnum(Enum):
    """Enum base class with helpers for Django ``choices`` tuples and
    human-readable documentation strings."""

    @classmethod
    def get_info(cls, title="", list_str=False):
        """Build a newline-separated description of the members, prefixed by
        ``title``. With ``list_str`` the dotted lowercase names are listed;
        otherwise ``value: NAME`` pairs are listed."""
        str_info = """
        """
        str_info += title
        if list_str:
            for name, member in cls.__members__.items():
                str_info += """
        %s
        """ % (
                    name.lower().replace("_", "."),
                )
        else:
            for name, member in cls.__members__.items():
                str_info += """
        %s: %s
        """ % (
                    member.value,
                    name,
                )
        return str_info

    @classmethod
    def to_choices(cls, string_as_value=False, separate_class_name=False):
        """Return ``(value, NAME)`` pairs for Django ``choices``.

        ``string_as_value`` uses dotted lowercase names as values;
        ``separate_class_name`` runs names through ``separate_upper_class``.
        """
        members = cls.__members__.items()
        if string_as_value:
            return [(name.lower().replace("_", "."), name) for name, _ in members]
        if separate_class_name:
            return [(separate_upper_class(name), name) for name, _ in members]
        return [(member.value, name) for name, member in members]

    @classmethod
    def values(cls):
        """List every member's value, in declaration order."""
        return [member.value for member in cls.__members__.values()]

    @classmethod
    def names(cls):
        """List every member's name, lowercased, in declaration order."""
        return [name.lower() for name in cls.__members__]
def separate_upper_class(class_name):
    """Convert a CamelCase class name to an underscore-separated identifier.

    A word boundary is inserted before every uppercase letter that follows a
    non-uppercase letter, and before the first letter. Only the boundary
    letters are lowercased, so runs of capitals keep their original case
    (matching the existing behavior for typical names like ``NodeConfig`` ->
    ``node_config``).
    """
    parts = ""
    for index, char in enumerate(class_name):
        # `index == 0` must be treated as a boundary explicitly: the previous
        # code read `class_name[index - 1]`, which for index 0 wraps around to
        # the LAST character, so names ending in a capital (e.g. "FooC") had
        # their first letter misclassified and kept its case.
        if char.isupper() and (index == 0 or not class_name[index - 1].isupper()):
            parts += " %s" % char.lower()
        else:
            parts += char
    return "_".join(parts.strip().split(" "))
class NodeConfig(AppConfig):
    """Django app registration for the ``node`` app (Fabric peer/orderer management)."""

    # 64-bit auto PKs for models that do not declare their own primary key.
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'node'
class Node(models.Model):
    """A Hyperledger Fabric node (peer or orderer) owned by one organization.

    Provisioning lives in ``node.service.create``, which fills ``config_file``,
    ``msp`` and ``tls`` with base64-encoded zip archives of the generated
    crypto material and node configuration.
    """

    class Type(models.TextChoices):
        PEER = "PEER", "Peer"
        ORDERER = "ORDERER", "Orderer"

    class Status(models.TextChoices):
        CREATED = "CREATED", "Created"
        RUNNING = "RUNNING", "Running"
        FAILED = "FAILED", "Failed"

    id = models.UUIDField(
        primary_key=True,
        help_text="Node ID",
        default=make_uuid,
    )
    name = models.CharField(
        help_text="Node Name",
        max_length=64,
    )
    type = models.CharField(
        help_text="Node Type",
        choices=Type.choices,
        max_length=64,
    )
    organization = models.ForeignKey(
        Organization,
        help_text="Organization Nodes",
        related_name="nodes",
        on_delete=models.CASCADE,
    )
    created_at = models.DateTimeField(
        help_text="Node Creation Timestamp", auto_now_add=True
    )
    status = models.CharField(
        help_text="Node Status",
        choices=Status.choices,
        max_length=64,
        default=Status.CREATED,
    )
    # base64 zip of the node's generated core.yaml/orderer.yaml
    # (see node.service._get_cfg); null until provisioning runs.
    config_file = models.TextField(
        help_text="Node Config File",
        null=True,
    )
    # base64 zip of the node's MSP directory (node.service._get_msp).
    msp = models.TextField(
        help_text="Node MSP",
        null=True,
    )
    # base64 zip of the node's TLS directory (node.service._get_tls).
    tls = models.TextField(
        help_text="Node TLS",
        null=True,
    )

    class Meta:
        # Newest nodes first.
        ordering = ("-created_at",)
class NodeCreateBody(serializers.ModelSerializer):
    """Payload serializer for creating a node.

    Expects the owning ``Organization`` under ``context["organization"]``.
    """

    class Meta:
        model = Node
        fields = (
            "name",
            "type",
        )
        extra_kwargs = {
            "name": {"required": True},
            "type": {"required": True},
        }

    def validate(self, data: Dict[str, Any]):
        """Reject a name already used by one of the organization's nodes.

        Uses a database EXISTS query instead of loading every node into
        Python and scanning (`any(... .nodes.all())`), which was O(n) in
        rows transferred.
        """
        if self.context["organization"].nodes.filter(name=data["name"]).exists():
            raise serializers.ValidationError("Node Exists")
        return data

    def create(self, validated_data: Dict[str, Any]) -> Node:
        """Delegate provisioning (crypto material + container start) to node.service."""
        return service.create(
            self.context["organization"],
            validated_data["type"],
            validated_data["name"],
        )
def create(organization: Organization, node_type: Node.Type, node_name: str) -> Node:
    """Provision a new peer/orderer for ``organization``.

    Extends the org's crypto material, generates the node configuration,
    packages MSP/TLS/config as base64 zips, starts a Fabric container on the
    shared ``cello-net`` network, and persists the Node row.

    Raises:
        DockerException: if the container cannot be started. The node is
            still saved with status FAILED so the failed attempt is visible
            through the API instead of silently disappearing.
    """
    CryptoConfig(organization.name).update({"type": node_type, "Specs": [node_name]})
    CryptoGen(organization.name).extend()
    node_domain_name = get_domain_name(organization.name, node_type, node_name)
    _generate_node_config(organization.name, node_type, node_domain_name)
    msp = _get_msp(organization.name, node_type, node_domain_name)
    tls = _get_tls(organization.name, node_type, node_domain_name)
    cfg = _get_cfg(organization.name, node_type, node_domain_name)

    node = Node(
        name=node_name,
        type=node_type,
        organization=organization,
        config_file=cfg,
        msp=msp,
        tls=tls,
    )

    try:
        # Equivalent of `docker run -dit hyperledger/fabric:<version> -e VARS`.
        docker.DockerClient("unix:///var/run/docker.sock").containers.run(
            "hyperledger/fabric:" + FABRIC_VERSION,
            _get_node_cmd(node_type),
            detach=True,
            tty=True,
            stdin_open=True,
            network="cello-net",
            name=node_domain_name,
            volumes=[
                "/var/run/docker.sock:/host/var/run/docker.sock"
            ],
            environment=_get_node_env(node_type, node_domain_name, msp, tls, cfg),
            # ports=port_map,
        )
        node.status = Node.Status.RUNNING
    except DockerException:
        node.status = Node.Status.FAILED
        logging.error(sys.exc_info())
        # Persist the FAILED node before propagating: previously the save
        # below was unreachable on this path, so failed nodes were never
        # recorded in the database.
        node.save()
        raise

    node.save()
    return node
peer_domain_name: str) -> None: + _generate_config( + FABRIC_PEER_CFG, + os.path.join( + get_peer_directory(organization_name, peer_domain_name), + "core.yaml"), + **{ + "peer_tls_enabled": True, + "operations_listenAddress": "{}:9444".format(peer_domain_name), + "peer_address": "{}:7051".format(peer_domain_name), + "peer_gossip_bootstrap": "{}:7051".format(peer_domain_name), + "peer_gossip_externalEndpoint": "{}:7051".format(peer_domain_name), + "peer_id": peer_domain_name, + "peer_localMspId": "{}MSP".format(organization_name.split(".", 1)[0].capitalize()), + "peer_mspConfigPath": "/etc/hyperledger/fabric/msp", + "peer_tls_cert_file": "/etc/hyperledger/fabric/tls/server.crt", + "peer_tls_key_file": "/etc/hyperledger/fabric/tls/server.key", + "peer_tls_rootcert_file": "/etc/hyperledger/fabric/tls/ca.crt", + "vm_docker_hostConfig_NetworkMode": "cello_net", + "vm_endpoint": "unix:///host/var/run/docker.sock" + } + ) + + +def _generate_orderer_config(organization_name: str, orderer_domain_name: str) -> None: + _generate_config( + FABRIC_ORDERER_CFG, + os.path.join( + get_orderer_directory(organization_name, orderer_domain_name), + "orderer.yaml"), + **{ + "Admin_TLS_Enabled": True, + "Admin_ListenAddress": "0.0.0.0:7053", + "Admin_TLS_Certificate": "/etc/hyperledger/fabric/tls/server.crt", + "Admin_TLS_PrivateKey": "/etc/hyperledger/fabric/tls/server.key", + "ChannelParticipation_Enabled": True, + "General_Cluster_ClientCertificate": "/etc/hyperledger/fabric/tls/server.crt", + "General_Cluster_ClientPrivateKey": "/etc/hyperledger/fabric/tls/server.key", + "General_ListenAddress": "0.0.0.0", + "General_ListenPort": 7050, + "General_LocalMSPID": "OrdererMSP", + "General_LocalMSPDir": "/etc/hyperledger/fabric/msp", + "General_TLS_Enabled": True, + "General_TLS_Certificate": "/etc/hyperledger/fabric/tls/server.crt", + "General_TLS_PrivateKey": "/etc/hyperledger/fabric/tls/server.key", + "General_TLS_RootCAs": "[/etc/hyperledger/fabric/tls/ca.crt]", + 
def _generate_config(src: str, dst: str, **kwargs) -> None:
    """Render a YAML config from the template at ``src`` into ``dst``.

    Each keyword's name encodes a nested path with underscores:
    ``a_b_c=v`` sets ``cfg["a"]["b"]["c"] = v``, creating intermediate
    mappings as needed. (Leaf keys therefore must not themselves contain
    underscores — the callers' key sets respect this.)
    """
    # Read-only access is sufficient; the template is never written back
    # (the original opened it with "r+").
    with open(src, "r") as f:
        cfg = yaml.load(f, Loader=yaml.FullLoader)
    if cfg is None:
        # Empty template file.
        cfg = {}

    for key, value in kwargs.items():
        sub_keys = key.split("_")
        cursor = cfg
        for sub_key in sub_keys[:-1]:
            cursor = cursor.setdefault(sub_key, {})
        cursor[sub_keys[-1]] = value

    with open(dst, "w") as f:
        yaml.dump(cfg, f)
os.path.join(directory_path, "core.yaml"), + cfg_zip_path + ) + with open(cfg_zip_path, "rb") as cfg_zip_input_stream: + return base64.b64encode(cfg_zip_input_stream.read()) + + +def _get_orderer_cfg(organization_name: str, orderer_domain_name: str): + directory_path = get_orderer_directory(organization_name, orderer_domain_name) + cfg_zip_path = os.path.join(directory_path, "orderer_config.zip") + _zip_directory( + os.path.join(directory_path, "orderer.yaml"), + cfg_zip_path + ) + with open(cfg_zip_path, "rb") as cfg_zip_input_stream: + return base64.b64encode(cfg_zip_input_stream.read()) + + +def _zip_directory(directory_path: str, output_file_path: str) -> None: + root_path_inside_zip = "/{}".format(directory_path.rsplit("/", 1)[1]) + with ZipFile(output_file_path, "w") as zip_output_stream: + for path, sub_directories, files in os.walk(directory_path): + path_inside_zip = root_path_inside_zip + path.replace(directory_path, "") + for filename in files: + zip_output_stream.write( + str(os.path.join(path, filename)), + str(os.path.join(path_inside_zip, filename)) + ) + for sud_directory in sub_directories: + zip_output_stream.write( + str(os.path.join(path, sud_directory)), + str(os.path.join(path_inside_zip, sud_directory)) + ) + + +def get_peer_directory(organization_name: str, peer_domain_name: str): + return _get_node_directory(organization_name, Node.Type.PEER, peer_domain_name) + + +def get_orderer_directory(organization_name: str, orderer_domain_name: str): + return _get_node_directory(organization_name, Node.Type.ORDERER, orderer_domain_name) + + +def _get_node_directory(organization_name: str, node_type: Node.Type, node_domain_name: str) -> str: + return "{}/{}s/{}".format( + get_org_directory(organization_name, node_type), + node_type.lower(), + node_domain_name, + ) + + +def get_org_directory(organization_name: str, node_type: Node.Type) -> str: + return "{}/{}/crypto-config/{}Organizations/{}".format( + CELLO_HOME, + organization_name, + 
node_type.lower(), + organization_name.split(".", 1)[1] + if node_type == Node.Type.ORDERER + else organization_name, + ) + + +def _get_node_env(node_type: Node.Type, node_domain_name: str, msp, tls, cfg) -> Optional[Dict[str, Any]]: + if node_type == Node.Type.PEER: + return _get_peer_env(node_domain_name, msp, tls, cfg) + elif node_type == Node.Type.ORDERER: + return _get_orderer_env(node_domain_name, msp, tls, cfg) + # throw exception here + return None + + +def _get_peer_env(peer_domain_name: str, msp, tls, cfg) -> Dict[str, Any]: + return { + "HLF_NODE_MSP": msp, + "HLF_NODE_TLS": tls, + "HLF_NODE_PEER_CONFIG": cfg, + "HLF_NODE_ORDERER_CONFIG": cfg, + "platform": "linux/amd64", + "CORE_VM_ENDPOINT": "unix:///host/var/run/docker.sock", + "CORE_VM_DOCKER_HOSTCONFIG_NETWORKMODE": "cello-net", + "FABRIC_LOGGING_SPEC": "INFO", + "CORE_PEER_TLS_ENABLED": "true", + "CORE_PEER_PROFILE_ENABLED": "false", + "CORE_PEER_TLS_CERT_FILE": "/etc/hyperledger/fabric/tls/server.crt", + "CORE_PEER_TLS_KEY_FILE": "/etc/hyperledger/fabric/tls/server.key", + "CORE_PEER_TLS_ROOTCERT_FILE": "/etc/hyperledger/fabric/tls/ca.crt", + "CORE_PEER_ID": peer_domain_name, + "CORE_PEER_ADDRESS": peer_domain_name + ":7051", + "CORE_PEER_LISTENADDRESS": "0.0.0.0:7051", + "CORE_PEER_CHAINCODEADDRESS": peer_domain_name + ":7052", + "CORE_PEER_CHAINCODELISTENADDRESS": "0.0.0.0:7052", + "CORE_PEER_GOSSIP_BOOTSTRAP": peer_domain_name + ":7051", + "CORE_PEER_GOSSIP_EXTERNALENDPOINT": peer_domain_name + ":7051", + "CORE_PEER_LOCALMSPID": peer_domain_name.split(".")[1].capitalize() + "MSP", + "CORE_PEER_MSPCONFIGPATH": "/etc/hyperledger/fabric/msp", + "CORE_OPERATIONS_LISTENADDRESS": peer_domain_name + ":9444", + "CORE_METRICS_PROVIDER": "prometheus", + } + + +def _get_orderer_env(orderer_domain_name: str, msp, tls, cfg) -> Dict[str, Any]: + return { + "HLF_NODE_MSP": msp, + "HLF_NODE_TLS": tls, + "HLF_NODE_PEER_CONFIG": cfg, + "HLF_NODE_ORDERER_CONFIG": cfg, + "platform": "linux/amd64", + 
def _get_node_cmd(node_type: Node.Type) -> Optional[str]:
    """Container start command for the given node type, or None when the
    type is not recognized (the container then falls back to its default
    command)."""
    commands = {
        Node.Type.PEER: 'bash /tmp/init.sh "peer node start"',
        Node.Type.ORDERER: 'bash /tmp/init.sh "orderer"',
    }
    # TextChoices members hash/compare like their string values, so plain
    # "PEER"/"ORDERER" strings resolve here exactly as with the original
    # if/elif chain.
    return commands.get(node_type)
class NodeViewSet(viewsets.ViewSet):
    """REST operations for the current organization's Fabric nodes."""

    permission_classes = [
        IsAuthenticated,
    ]

    @swagger_auto_schema(
        operation_summary="List all nodes of the current organization",
        query_serializer=PageQuerySerializer(),
        responses=with_common_response(
            {status.HTTP_200_OK: make_response_serializer(NodeList)}
        ),
    )
    def list(self, request):
        """Return one page of the organization's nodes."""
        page_query = PageQuerySerializer(data=request.GET)
        queryset = Node.objects.filter(organization=request.user.organization)
        paginator = page_query.get_paginator(queryset)
        current_page = paginator.page(page_query.data["page"])
        payload = NodeList(
            {
                "total": paginator.count,
                "data": NodeResponse(current_page.object_list, many=True).data,
            }
        )
        return Response(status=status.HTTP_200_OK, data=ok(payload.data))

    @swagger_auto_schema(
        operation_summary="Create a new node of the current organization",
        request_body=NodeCreateBody,
        responses=with_common_response(
            {status.HTTP_201_CREATED: make_response_serializer(NodeID)}
        ),
    )
    def create(self, request):
        """Validate the payload, provision the node, and respond with its ID."""
        body = NodeCreateBody(
            data=request.data,
            context={"organization": request.user.organization},
        )
        body.is_valid(raise_exception=True)
        node = body.save()
        return Response(
            status=status.HTTP_201_CREATED,
            data=ok(NodeID(node.__dict__).data),
        )
index 100% rename from src/api-engine/api/lib/configtxlator/__init__.py rename to src/api-engine/organization/__init__.py diff --git a/src/api-engine/organization/admin.py b/src/api-engine/organization/admin.py new file mode 100644 index 000000000..8c38f3f3d --- /dev/null +++ b/src/api-engine/organization/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. diff --git a/src/api-engine/organization/apps.py b/src/api-engine/organization/apps.py new file mode 100644 index 000000000..b33d50af9 --- /dev/null +++ b/src/api-engine/organization/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class OrganizationConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'organization' diff --git a/src/api-engine/organization/migrations/0001_initial.py b/src/api-engine/organization/migrations/0001_initial.py new file mode 100644 index 000000000..fc8095a3f --- /dev/null +++ b/src/api-engine/organization/migrations/0001_initial.py @@ -0,0 +1,29 @@ +# Generated by Django 4.2.16 on 2025-09-28 23:48 + +import common.utils +import common.validators +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='Organization', + fields=[ + ('id', models.UUIDField(default=common.utils.make_uuid, help_text='ID of organization', primary_key=True, serialize=False)), + ('name', models.CharField(help_text='Name of organization', max_length=64, unique=True, validators=[common.validators.validate_host])), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('msp', models.TextField(help_text='msp of organization', null=True)), + ('tls', models.TextField(help_text='tls of organization', null=True)), + ], + options={ + 'ordering': ('-created_at',), + }, + ), + ] diff --git a/src/api-engine/api/lib/peer/__init__.py b/src/api-engine/organization/migrations/__init__.py similarity index 100% 
class Organization(models.Model):
    """A Fabric organization; owns Node rows via the reverse `nodes` relation."""

    id = models.UUIDField(
        primary_key=True,
        help_text="ID of organization",
        default=make_uuid,
    )
    # Must be a valid hostname — the name doubles as the org's domain in
    # crypto-material directory paths (see node.service.get_org_directory).
    name = models.CharField(
        max_length=64,
        help_text="Name of organization",
        unique=True,
        validators=[validate_host]
    )
    created_at = models.DateTimeField(auto_now_add=True)
    # NOTE(review): presumably base64 zips of the org's MSP/TLS material,
    # mirroring Node.msp/Node.tls — confirm against whatever populates these.
    msp = models.TextField(help_text="msp of organization", null=True)
    tls = models.TextField(help_text="tls of organization", null=True)

    class Meta:
        # Newest organizations first.
        ordering = ("-created_at",)
class OrganizationViewSet(viewsets.ViewSet):
    """Class represents organization related operations."""
    permission_classes = [IsAuthenticated]

    @swagger_auto_schema(
        operation_summary="Get Organization",
        responses=with_common_response(
            {status.HTTP_200_OK: make_response_serializer(OrganizationResponse)}
        ),
    )
    def retrieve(self, request, pk=None):
        """Fetch a single organization by primary key; 404 with an error
        envelope when it does not exist."""
        try:
            res = Organization.objects.get(pk=pk)
        except Organization.DoesNotExist:
            return Response(
                status=status.HTTP_404_NOT_FOUND,
                data=err("Organization not found")
            )
        return Response(
            status=status.HTTP_200_OK,
            data=ok(OrganizationResponse(res).data)
        )

    @swagger_auto_schema(
        operation_summary="Get Organizations",
        query_serializer=PageQuerySerializer(),
        responses=with_common_response(
            {status.HTTP_200_OK: make_response_serializer(OrganizationList)}
        ),
    )
    def list(self, request):
        """Return one page of all organizations, wrapped in the
        {total, data} list envelope."""
        serializer = PageQuerySerializer(data=request.GET)
        # get_paginator() validates the query serializer before paging.
        p = serializer.get_paginator(Organization.objects.all())
        return Response(
            status=status.HTTP_200_OK,
            data=ok(OrganizationList({
                "total": p.count,
                "data": OrganizationResponse(
                    p.page(serializer.data["page"]).object_list,
                    many=True
                ).data
            }).data)
        )

    @swagger_auto_schema(
        operation_summary="Delete Organizations",
        responses=with_common_response(
            {status.HTTP_204_NO_CONTENT: "No Content"}
        )
    )
    def destroy(self, request: Request, pk: Optional[str] = None) -> Response:
        """Delete an organization by primary key.

        NOTE(review): the bare `except Exception` wraps every failure —
        including DoesNotExist — into CustomError, so a missing org does not
        produce a 404 here; confirm CustomError's status mapping is the
        intended contract.
        """
        try:
            Organization.objects.get(id=pk).delete()
        except Exception as e:
            raise CustomError(detail=str(e))
        return Response(status=status.HTTP_204_NO_CONTENT)
-websocket-client==0.57.0 -fqdn===1.5.1 diff --git a/build_image/docker/common/api-engine/server.ini b/src/api-engine/server.ini similarity index 100% rename from build_image/docker/common/api-engine/server.ini rename to src/api-engine/server.ini diff --git a/src/api-engine/api/management/__init__.py b/src/api-engine/user/__init__.py similarity index 100% rename from src/api-engine/api/management/__init__.py rename to src/api-engine/user/__init__.py diff --git a/src/api-engine/user/admin.py b/src/api-engine/user/admin.py new file mode 100644 index 000000000..8c38f3f3d --- /dev/null +++ b/src/api-engine/user/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. diff --git a/src/api-engine/user/apps.py b/src/api-engine/user/apps.py new file mode 100644 index 000000000..36cce4c8e --- /dev/null +++ b/src/api-engine/user/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class UserConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'user' diff --git a/src/api-engine/user/enums.py b/src/api-engine/user/enums.py new file mode 100644 index 000000000..d74d6d8d8 --- /dev/null +++ b/src/api-engine/user/enums.py @@ -0,0 +1,7 @@ +from enum import unique, Enum, auto + + +@unique +class UserRole(Enum): + ADMIN = auto() + USER = auto() diff --git a/src/api-engine/user/migrations/0001_initial.py b/src/api-engine/user/migrations/0001_initial.py new file mode 100644 index 000000000..c06ac0ba4 --- /dev/null +++ b/src/api-engine/user/migrations/0001_initial.py @@ -0,0 +1,50 @@ +# Generated by Django 4.2.16 on 2025-09-28 23:48 + +import common.utils +import django.contrib.auth.models +import django.contrib.auth.validators +from django.db import migrations, models +import django.db.models.deletion +import django.utils.timezone + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('auth', '0012_alter_user_first_name_max_length'), + ('organization', '0001_initial'), + ] + + 
operations = [ + migrations.CreateModel( + name='UserProfile', + fields=[ + ('password', models.CharField(max_length=128, verbose_name='password')), + ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')), + ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')), + ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')), + ('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')), + ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')), + ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')), + ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')), + ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')), + ('id', models.UUIDField(default=common.utils.make_uuid, help_text='User ID', primary_key=True, serialize=False)), + ('email', models.EmailField(db_index=True, max_length=254, unique=True)), + ('role', models.CharField(choices=[('ADMIN', 'Admin'), ('USER', 'User')], default='USER', help_text='User Role', max_length=64)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. 
A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.group', verbose_name='groups')), + ('organization', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='users', to='organization.organization')), + ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.permission', verbose_name='user permissions')), + ], + options={ + 'verbose_name': 'User Info', + 'verbose_name_plural': 'User Info', + 'ordering': ['-date_joined'], + }, + managers=[ + ('objects', django.contrib.auth.models.UserManager()), + ], + ), + ] diff --git a/src/api-engine/api/management/commands/__init__.py b/src/api-engine/user/migrations/__init__.py similarity index 100% rename from src/api-engine/api/management/commands/__init__.py rename to src/api-engine/user/migrations/__init__.py diff --git a/src/api-engine/user/models.py b/src/api-engine/user/models.py new file mode 100644 index 000000000..41315004a --- /dev/null +++ b/src/api-engine/user/models.py @@ -0,0 +1,52 @@ +from django.contrib.auth.models import AbstractUser +from django.db import models + +from common.utils import make_uuid +from organization.models import Organization + + +# Create your models here. 
+ +class UserProfile(AbstractUser): + class Role(models.TextChoices): + ADMIN = "ADMIN", "Admin" + USER = "USER", "User" + + id = models.UUIDField( + primary_key=True, + help_text="User ID", + default=make_uuid, + ) + email = models.EmailField(db_index=True, unique=True) + role = models.CharField( + choices=Role.choices, + default=Role.USER, + max_length=64, + help_text="User Role", + ) + organization = models.ForeignKey( + Organization, + on_delete=models.CASCADE, + related_name="users", + ) + created_at = models.DateTimeField(auto_now_add=True) + + USERNAME_FIELD = "email" + + REQUIRED_FIELDS = [] + + class Meta: + verbose_name = "User Info" + verbose_name_plural = verbose_name + ordering = ["-date_joined"] + + def __str__(self): + return self.username + + @property + def is_admin(self): + return self.role == self.Role.ADMIN + + @property + def is_common_user(self): + return self.role == self.Role.USER diff --git a/src/api-engine/user/serializers.py b/src/api-engine/user/serializers.py new file mode 100644 index 000000000..757a37397 --- /dev/null +++ b/src/api-engine/user/serializers.py @@ -0,0 +1,69 @@ +# +# SPDX-License-Identifier: Apache-2.0 +# +from typing import Dict, Any + +from rest_framework import serializers +from api.common.serializers import ListResponseSerializer +from organization.serializeres import OrganizationID, OrganizationResponse +from user.models import UserProfile + + +class UserCreateBody(serializers.ModelSerializer): + class Meta: + model = UserProfile + fields = ("role", "password", "email") + extra_kwargs = { + "role": {"required": True}, + "email": {"required": True}, + "password": {"required": True}, + } + + def create(self, validated_data: Dict[str, Any]) -> UserProfile: + user = UserProfile( + username=validated_data["email"], + email=validated_data["email"], + role=validated_data["role"], + organization=self.context["organization"], + ) + + user.set_password(validated_data["password"]) + user.save() + return user + + +class 
UserID(serializers.ModelSerializer): + class Meta: + model = UserProfile + fields = ("id",) + + +class UserInfo(serializers.ModelSerializer): + organization = OrganizationResponse() + + class Meta: + model = UserProfile + fields = ( + "id", + "email", + "role", + "organization", + "created_at" + ) + + +class UserList(ListResponseSerializer): + data = UserInfo(many=True, help_text="Users list") + + +class UserPasswordUpdate(serializers.Serializer): + password = serializers.CharField( + help_text="New password for login", max_length=64 + ) + + def create(self, validated_data: Dict[str, Any]) -> UserProfile: + request = self.context["request"] + user = request.user + user.set_password(validated_data["password"]) + user.save() + return user diff --git a/src/api-engine/user/tests.py b/src/api-engine/user/tests.py new file mode 100644 index 000000000..7ce503c2d --- /dev/null +++ b/src/api-engine/user/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. diff --git a/src/api-engine/user/views.py b/src/api-engine/user/views.py new file mode 100644 index 000000000..d78ef62fe --- /dev/null +++ b/src/api-engine/user/views.py @@ -0,0 +1,117 @@ +# Create your views here. 
+# +# SPDX-License-Identifier: Apache-2.0 +# +import logging +from typing import Optional + +from drf_yasg.utils import swagger_auto_schema +from rest_framework import viewsets, status +from rest_framework.decorators import action +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework.permissions import IsAuthenticated + +from api.common import ok +from api.common.response import make_response_serializer + +from api.exceptions import CustomError +from common.serializers import PageQuerySerializer +from user.serializers import ( + UserCreateBody, + UserID, + UserList, + UserPasswordUpdate, UserInfo, +) +from api.utils.common import with_common_response +from user.models import UserProfile + +LOG = logging.getLogger(__name__) + + +class UserViewSet(viewsets.ViewSet): + permission_classes = [IsAuthenticated] + + @swagger_auto_schema( + operation_summary="List users in the current organization", + query_serializer=PageQuerySerializer(), + responses=with_common_response( + {status.HTTP_200_OK: make_response_serializer(UserList)} + ), + ) + def list(self, request: Request) -> Response: + serializer = PageQuerySerializer(data=request.GET) + p = serializer.get_paginator(UserProfile.objects.filter(organization=request.user.organization)) + return Response( + status=status.HTTP_200_OK, + data=ok(UserList({ + "total": p.count, + "data": UserInfo( + p.page(serializer.data['page']).object_list, + many=True + ).data, + }).data), + ) + + @swagger_auto_schema( + operation_summary="Create a user in the current organization", + request_body=UserCreateBody, + responses=with_common_response( + {status.HTTP_201_CREATED: make_response_serializer(UserID)} + ), + ) + def create(self, request: Request) -> Response: + serializer = UserCreateBody(data=request.data, context={"organization": request.user.organization}) + serializer.is_valid(raise_exception=True) + response = UserID(data={"id": serializer.save().id}) + 
response.is_valid(raise_exception=True) + return Response( + status=status.HTTP_201_CREATED, + data=ok(response.data), + ) + + @swagger_auto_schema( + operation_summary="Delete a user in the current organization", + responses=with_common_response( + {status.HTTP_204_NO_CONTENT: "No Content"} + ) + ) + def destroy(self, request: Request, pk: Optional[str] = None) -> Response: + try: + UserProfile.objects.get(organization=request.user.organization, id=pk).delete() + except Exception as e: + raise CustomError(detail=str(e)) + return Response(status=status.HTTP_204_NO_CONTENT) + + @swagger_auto_schema( + method="PUT", + operation_summary="Update the current user's password", + request_body=UserPasswordUpdate, + responses=with_common_response({status.HTTP_204_NO_CONTENT: "No Content"}), + ) + @action( + methods=["PUT"], + detail=False, + url_path="password", + ) + def password(self, request: Request) -> Response: + serializer = UserPasswordUpdate(data=request.data, context={"request": request}) + serializer.is_valid(raise_exception=True) + serializer.save() + return Response(status=status.HTTP_204_NO_CONTENT) + + @swagger_auto_schema( + method="GET", + operation_summary="Get the current user", + responses=with_common_response({status.HTTP_200_OK: make_response_serializer(UserInfo)}), + ) + @action( + methods=["GET"], + detail=False, + url_path="profile", + ) + def profile(self, request: Request) -> Response: + return Response( + status=status.HTTP_200_OK, + data=ok(UserInfo(request.user).data), + ) diff --git a/build_image/docker/common/dashboard/Dockerfile.in b/src/dashboard/Dockerfile similarity index 55% rename from build_image/docker/common/dashboard/Dockerfile.in rename to src/dashboard/Dockerfile index e9e9dabe1..b30b6acd9 100644 --- a/build_image/docker/common/dashboard/Dockerfile.in +++ b/src/dashboard/Dockerfile @@ -2,15 +2,12 @@ FROM node:20.15 WORKDIR /usr/src/app/ USER root -RUN mkdir -p /usr/src/app && cd /usr/src/app -COPY src/dashboard /usr/src/app +COPY . 
. RUN export NODE_OPTIONS=--openssl-legacy-provider && yarn --network-timeout 600000 && yarn run build FROM nginx:1.15.12 COPY --from=0 /usr/src/app/dist /usr/share/nginx/html -COPY build_image/docker/common/dashboard/config-nginx.sh / -RUN chmod +x /config-nginx.sh -COPY build_image/docker/common/dashboard/nginx.conf /etc/nginx/ +COPY cello.conf /etc/nginx/conf.d/ EXPOSE 8081 diff --git a/src/dashboard/cello.conf b/src/dashboard/cello.conf new file mode 100644 index 000000000..08bb7ea8c --- /dev/null +++ b/src/dashboard/cello.conf @@ -0,0 +1,32 @@ +server { + listen 8081; + server_name localhost; + + gzip on; + gzip_min_length 1k; + gzip_comp_level 9; + gzip_types text/plain application/javascript application/x-javascript text/css application/xml text/javascript application/x-httpd-php image/jpeg image/gif image/png; + gzip_vary on; + gzip_disable "MSIE [1-6]\."; + + root /usr/share/nginx/html; + index index.html; + + client_max_body_size 50M; + + location / { + try_files $uri /index.html; + } + + location /api { + proxy_pass http://cello-api-engine:8080; + proxy_set_header Host $host:$server_port; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + location ~ /\. 
{ + deny all; + } +} diff --git a/src/dashboard/config/router.config.js b/src/dashboard/config/router.config.js index 952e7d49f..f05c3f537 100755 --- a/src/dashboard/config/router.config.js +++ b/src/dashboard/config/router.config.js @@ -38,24 +38,24 @@ export default [ icon: 'team', component: './Organization/Organization', }, - { - path: '/agent', - name: 'agent', - icon: 'agent', - component: './Agent/Agent', - }, - { - path: '/agent/newAgent', - name: 'newAgent', - component: './Agent/newAgent', - hideInMenu: true, - }, - { - path: '/agent/editAgent', - name: 'editAgent', - component: './Agent/newAgent', - hideInMenu: true, - }, + // { + // path: '/agent', + // name: 'agent', + // icon: 'agent', + // component: './Agent/Agent', + // }, + // { + // path: '/agent/newAgent', + // name: 'newAgent', + // component: './Agent/newAgent', + // hideInMenu: true, + // }, + // { + // path: '/agent/editAgent', + // name: 'editAgent', + // component: './Agent/newAgent', + // hideInMenu: true, + // }, { path: '/node', name: 'node', @@ -84,18 +84,6 @@ export default [ }, ], }, - { - path: '/network', - name: 'network', - icon: 'network', - component: './Network/Network', - }, - { - path: '/network/newNetwork', - name: 'newNetwork', - component: './Network/newNetwork', - hideInMenu: true, - }, { path: '/channel', name: 'channel', @@ -114,6 +102,29 @@ export default [ icon: 'user', component: './UserManagement/UserManagement', }, + { + path: '/api/v1/docs', + name: 'REST API', + icon: 'api', + isExternal: true, + isBottom: true, + }, + { + path: 'https://github.com/hyperledger-cello', + name: 'GitHub', + icon: 'github', + isExternal: true, + isBottom: true, + target: '_blank', + }, + { + path: 'https://hyperledger-cello.readthedocs.io', + name: 'docs', + icon: 'docs', + isExternal: true, + isBottom: true, + target: '_blank', + }, ], }, { diff --git a/src/dashboard/package.json b/src/dashboard/package.json index cb2c067db..1a4fabfd8 100644 --- a/src/dashboard/package.json +++ 
b/src/dashboard/package.json @@ -24,8 +24,8 @@ "lint:style": "stylelint 'src/**/*.less' --syntax less", "prettier": "node ./scripts/prettier.js", "site": "umi build && yarn run functions:build", - "start": "cross-env umi dev PORT=8001", - "start:no-mock": "cross-env MOCK=none umi dev PORT=8002", + "start": "cross-env PORT=8001 umi dev", + "start:no-mock": "cross-env PORT=8002 MOCK=none umi dev", "test": "umi test", "test:all": "node ./tests/run-tests.js", "test:component": "umi test ./src/components", diff --git a/src/dashboard/src/app.js b/src/dashboard/src/app.js index 2e98b22be..a84827e38 100644 --- a/src/dashboard/src/app.js +++ b/src/dashboard/src/app.js @@ -8,4 +8,38 @@ export const dva = { export function render(oldRender) { oldRender(); + // runtime 自動註冊 models(支援使用 createModel(...) 的檔案) + // umi 提供 getDvaApp 與 require.context(webpack) + // eslint-disable-next-line global-require + const { getDvaApp } = require('umi'); + const app = typeof getDvaApp === 'function' ? getDvaApp() : null; + + if (!app) { + // dva 尚未就緒或 plugin 未啟用 + return; + } + + // 使用 webpack 的 require.context 來載入 src/models 下的所有 .js/.ts/.tsx 檔案 + let req; + try { + req = require.context('./models', true, /\.(j|t)sx?$/); + } catch (e) { + // 若環境不支援 require.context(極少見),直接結束 + return; + } + + req.keys().forEach(key => { + const mod = req(key); + const m = mod && (mod.default || mod); // 支援 default export 或 module.exports + if (!m || !m.namespace) { + // skip 非 model 檔或未正確 export 的檔案 + return; + } + + // eslint-disable-next-line no-underscore-dangle + const exists = (app._models || []).some(mm => mm.namespace === m.namespace); + if (!exists) { + app.model(m); + } + }); } diff --git a/src/dashboard/src/components/GlobalHeader/RightContent.js b/src/dashboard/src/components/GlobalHeader/RightContent.js index cde1acd56..c5e789a0e 100644 --- a/src/dashboard/src/components/GlobalHeader/RightContent.js +++ b/src/dashboard/src/components/GlobalHeader/RightContent.js @@ -1,6 +1,6 @@ import React, { 
PureComponent } from 'react'; import { injectIntl } from 'umi'; -import { Spin, Menu, Avatar } from 'antd'; +import { Spin, Avatar } from 'antd'; import { LogoutOutlined } from '@ant-design/icons'; import HeaderDropdown from '../HeaderDropdown'; import SelectLang from '../SelectLang'; @@ -9,28 +9,30 @@ import styles from './index.less'; class GlobalHeaderRight extends PureComponent { render() { const { currentUser, onMenuClick, intl } = this.props; - const menu = ( - - - - {intl.formatMessage({ - id: 'menu.account.logout', - defaultMessage: 'logout', - })} - - - ); + const menuItems = [ + { + key: 'logout', + icon: , + label: ( + + {intl.formatMessage({ + id: 'menu.account.logout', + defaultMessage: 'logout', + })} + + ), + onClick: onMenuClick, + }, + ]; const className = styles.right; - // if (theme === 'dark') { - // className = `${styles.right} ${styles.dark}`; - // } + return (
- {currentUser.username ? ( - + {currentUser.id ? ( + - {currentUser.username} + {currentUser.email} ) : ( diff --git a/src/dashboard/src/components/GlobalHeader/index.less b/src/dashboard/src/components/GlobalHeader/index.less index bac55dc22..3679c4fcd 100644 --- a/src/dashboard/src/components/GlobalHeader/index.less +++ b/src/dashboard/src/components/GlobalHeader/index.less @@ -24,13 +24,16 @@ } } -.menu { - :global(.anticon) { - margin-right: 8px; - } - :global(.ant-dropdown-menu-item) { - min-width: 160px; - } +:global(.anticon) { + margin-right: 8px; +} + +:global(.ant-dropdown-menu) { + width: 120px; +} + +:global(.ant-dropdown-menu-item) { + width: 120px; } .trigger { diff --git a/src/dashboard/src/components/HeaderDropdown/index.js b/src/dashboard/src/components/HeaderDropdown/index.js index a19c471ac..6db801107 100644 --- a/src/dashboard/src/components/HeaderDropdown/index.js +++ b/src/dashboard/src/components/HeaderDropdown/index.js @@ -5,9 +5,7 @@ import styles from './index.less'; export default class HeaderDropdown extends PureComponent { render() { - const { overlayClassName, ...props } = this.props; - return ( - - ); + const { className, ...props } = this.props; + return ; } } diff --git a/src/dashboard/src/components/SelectLang/index.js b/src/dashboard/src/components/SelectLang/index.js index 11463fd40..1dc19a2e7 100644 --- a/src/dashboard/src/components/SelectLang/index.js +++ b/src/dashboard/src/components/SelectLang/index.js @@ -1,6 +1,5 @@ import React, { PureComponent } from 'react'; -import { injectIntl, setLocale, getLocale } from 'umi'; -import { Menu } from 'antd'; +import { injectIntl, setLocale } from 'umi'; import { GlobalOutlined } from '@ant-design/icons'; import classNames from 'classnames'; import HeaderDropdown from '../HeaderDropdown'; @@ -14,7 +13,6 @@ class SelectLang extends PureComponent { render() { const { className, intl } = this.props; - const selectedLang = getLocale(); const locales = ['zh-CN', 'en-US']; const 
languageLabels = { 'zh-CN': '简体中文', @@ -24,22 +22,20 @@ class SelectLang extends PureComponent { 'zh-CN': '🇨🇳', 'en-US': '🇬🇧', }; - const langMenu = ( - - {locales.map(locale => ( - - - {languageIcons[locale]} - {' '} - {languageLabels[locale]} - - ))} - - ); + const langMenuItems = locales.map(locale => ({ + key: locale, + label: ( + + {languageIcons[locale]} {languageLabels[locale]} + + ), + onClick: this.changeLang, + })); return ( - + - + + {intl.formatMessage({ id: 'navBar.lang' })} ); diff --git a/src/dashboard/src/components/SelectLang/index.less b/src/dashboard/src/components/SelectLang/index.less index 9f41ade9a..9894b3098 100644 --- a/src/dashboard/src/components/SelectLang/index.less +++ b/src/dashboard/src/components/SelectLang/index.less @@ -1,14 +1,5 @@ @import '~antd/lib/style/themes/default.less'; -.menu { - :global(.anticon) { - margin-right: 8px; - } - :global(.ant-dropdown-menu-item) { - min-width: 160px; - } -} - .dropDown { line-height: @layout-header-height; vertical-align: top; diff --git a/src/dashboard/src/components/SiderMenu/BaseMenu.js b/src/dashboard/src/components/SiderMenu/BaseMenu.js index 013009c49..c856e89fa 100644 --- a/src/dashboard/src/components/SiderMenu/BaseMenu.js +++ b/src/dashboard/src/components/SiderMenu/BaseMenu.js @@ -11,15 +11,14 @@ import { DeploymentUnitOutlined, FunctionOutlined, UserOutlined, + BookOutlined, + GithubOutlined, + ApiOutlined, } from '@ant-design/icons'; import { Link } from 'umi'; import { urlToList } from '../_utils/pathTools'; import { getMenuMatches } from './SiderMenuUtils'; -// import { isUrl } from '@/utils/utils'; -// import styles from './index.less'; -// import IconFont from '@/components/IconFont'; -const { SubMenu } = Menu; const menus = { eye: , dashboard: , @@ -30,6 +29,9 @@ const menus = { chaincode: , user: , agent: , + docs: , + github: , + api: , }; // Allow menu.js config icon as string or ReactNode @@ -55,14 +57,47 @@ export default class BaseMenu extends PureComponent { * 获得菜单子节点 
* @memberof SiderMenu */ - getNavMenuItems = menusData => { + getNavMenuItems = (menusData, isFromTop) => { if (!menusData) { return []; } + const { isMobile, onCollapse, location } = this.props; return menusData - .filter(item => item.name && !item.hideInMenu) - .map(item => this.getSubMenuOrItem(item)) - .filter(item => item); + .filter(item => item.name && !item.hideInMenu && isFromTop !== (item.isBottom ?? false)) + .map(item => { + const itemNode = { + key: item.path, + icon: getIcon(item.icon), + // label 可以是 ReactNode(Link / / 字串) + label: item.isExternal ? ( + + {item.name} + + ) : ( + { + onCollapse(true); + } + : undefined + } + > + {item.name} + + ), + }; + + // 如果有子節點且沒有 hideChildrenInMenu,遞迴產生 children + if (item.children && !item.hideChildrenInMenu && item.children.some(c => c.name)) { + itemNode.children = this.getNavMenuItems(item.children, isFromTop); + } + + return itemNode; + }); }; // Get the currently selected menu @@ -71,73 +106,6 @@ export default class BaseMenu extends PureComponent { return urlToList(pathname).map(itemPath => getMenuMatches(flatMenuKeys, itemPath).pop()); }; - /** - * get SubMenu or Item - */ - getSubMenuOrItem = item => { - // doc: add hideChildrenInMenu - if (item.children && !item.hideChildrenInMenu && item.children.some(child => child.name)) { - const { name } = item; - return ( - - {getIcon(item.icon)} - {name} - - ) : ( - name - ) - } - key={item.path} - > - {this.getNavMenuItems(item.children)} - - ); - } - return {this.getMenuItemPath(item)}; - }; - - /** - * 判断是否是http链接.返回 Link 或 a - * Judge whether it is http link.return a or Link - * @memberof SiderMenu - */ - getMenuItemPath = item => { - const { name } = item; - const itemPath = this.conversionPath(item.path); - const icon = getIcon(item.icon); - const { target } = item; - // Is it a http link - if (/^https?:\/\//.test(itemPath)) { - return ( - - {icon} - {name} - - ); - } - const { location, isMobile, onCollapse } = this.props; - return ( - { - 
onCollapse(true); - } - : undefined - } - > - {icon} - {name} - - ); - }; - conversionPath = path => { if (path && path.indexOf('http') === 0) { return path; @@ -184,9 +152,22 @@ export default class BaseMenu extends PureComponent { }); return ( - <> +
+ this.getPopupContainer(fixedHeader, layout)} + items={this.getNavMenuItems(menuData, true)} + /> +
this.getPopupContainer(fixedHeader, layout)} - > - {this.getNavMenuItems(menuData)} - + items={this.getNavMenuItems(menuData, false)} + />
- +
); } } diff --git a/src/dashboard/src/components/SiderMenu/SiderMenuUtils.js b/src/dashboard/src/components/SiderMenu/SiderMenuUtils.js index 9353d135f..9146e26db 100644 --- a/src/dashboard/src/components/SiderMenu/SiderMenuUtils.js +++ b/src/dashboard/src/components/SiderMenu/SiderMenuUtils.js @@ -9,6 +9,9 @@ import { urlToList } from '../_utils/pathTools'; export const getFlatMenuKeys = menuData => { let keys = []; menuData.forEach(item => { + if (item.isExternal) { + return; + } keys.push(item.path); if (item.children) { keys = keys.concat(getFlatMenuKeys(item.children)); diff --git a/src/dashboard/src/hooks/index.js b/src/dashboard/src/hooks/index.js new file mode 100644 index 000000000..15a0f939c --- /dev/null +++ b/src/dashboard/src/hooks/index.js @@ -0,0 +1,6 @@ +/* + SPDX-License-Identifier: Apache-2.0 +*/ +export { useTableManagement } from './useTableManagement'; +export { useModalForm } from './useModalForm'; +export { useDeleteConfirm } from './useDeleteConfirm'; diff --git a/src/dashboard/src/hooks/useDeleteConfirm.js b/src/dashboard/src/hooks/useDeleteConfirm.js new file mode 100644 index 000000000..69bb33049 --- /dev/null +++ b/src/dashboard/src/hooks/useDeleteConfirm.js @@ -0,0 +1,107 @@ +/* + SPDX-License-Identifier: Apache-2.0 +*/ +import { useCallback } from 'react'; +import { Modal, message } from 'antd'; + +/** + * Hook for handling delete confirmation dialog + * + * @param {Object} options - Configuration options + * @param {Function} options.dispatch - Dva dispatch function + * @param {Object} options.intl - react-intl instance for i18n + * @returns {Object} Delete confirmation handlers + * + * @example + * const { showDeleteConfirm } = useDeleteConfirm({ dispatch, intl }); + * + * showDeleteConfirm({ + * record: agent, + * nameField: 'name', + * deleteAction: 'agent/deleteAgent', + * titleId: 'app.agent.form.delete.title', + * contentId: 'app.agent.form.delete.content', + * successId: 'app.agent.delete.success', + * failId: 
'app.agent.delete.fail', + * onSuccess: () => refreshList(), + * }); + */ +export function useDeleteConfirm({ dispatch, intl }) { + /** + * Show delete confirmation dialog + */ + const showDeleteConfirm = useCallback( + ({ + record, + nameField = 'name', + deleteAction, + titleId, + titleDefault = 'Delete', + contentId, + contentDefault = 'Confirm to delete {name}?', + successId, + successDefault = 'Delete success', + failId, + failDefault = 'Delete failed', + getPayload = r => r.id, + onSuccess, + onFail, + }) => { + const name = record[nameField]; + + Modal.confirm({ + title: intl.formatMessage({ + id: titleId, + defaultMessage: titleDefault, + }), + content: intl.formatMessage( + { + id: contentId, + defaultMessage: contentDefault, + }, + { name } + ), + okText: intl.formatMessage({ id: 'form.button.confirm', defaultMessage: 'Confirm' }), + cancelText: intl.formatMessage({ id: 'form.button.cancel', defaultMessage: 'Cancel' }), + onOk() { + dispatch({ + type: deleteAction, + payload: getPayload(record), + callback: response => { + if (response.status === 'successful' || !response.code) { + message.success( + intl.formatMessage( + { + id: successId, + defaultMessage: successDefault, + }, + { name } + ) + ); + if (onSuccess) onSuccess(response); + } else { + message.error( + intl.formatMessage( + { + id: failId, + defaultMessage: failDefault, + }, + { name } + ) + ); + if (onFail) onFail(response); + } + }, + }); + }, + }); + }, + [dispatch, intl] + ); + + return { + showDeleteConfirm, + }; +} + +export default useDeleteConfirm; diff --git a/src/dashboard/src/hooks/useModalForm.js b/src/dashboard/src/hooks/useModalForm.js new file mode 100644 index 000000000..83bf65a6e --- /dev/null +++ b/src/dashboard/src/hooks/useModalForm.js @@ -0,0 +1,107 @@ +/* + SPDX-License-Identifier: Apache-2.0 +*/ +import { useState, useCallback } from 'react'; + +/** + * Hook for managing modal form state (visibility, method, current record) + * + * @param {Object} options - 
Configuration options + * @param {Object} [options.defaultRecord={}] - Default record when creating new item + * @returns {Object} Modal form state and handlers + * + * @example + * const { + * modalVisible, + * modalMethod, + * currentRecord, + * openCreateModal, + * openUpdateModal, + * closeModal, + * } = useModalForm(); + * + * // Open for creating + * + * + * // Open for updating + * openUpdateModal(record)}>Edit + * + * // Modal component + * + */ +export function useModalForm(options = {}) { + const { defaultRecord = {} } = options; + + const [modalVisible, setModalVisible] = useState(false); + const [modalMethod, setModalMethod] = useState('create'); + const [currentRecord, setCurrentRecord] = useState(defaultRecord); + + /** + * Open modal for creating new item + */ + const openCreateModal = useCallback(() => { + setCurrentRecord(defaultRecord); + setModalMethod('create'); + setModalVisible(true); + }, [defaultRecord]); + + /** + * Open modal for updating existing item + */ + const openUpdateModal = useCallback(record => { + setCurrentRecord(record); + setModalMethod('update'); + setModalVisible(true); + }, []); + + /** + * Close modal and reset state + */ + const closeModal = useCallback(() => { + setModalVisible(false); + // Reset after modal close animation + setTimeout(() => { + setModalMethod('create'); + setCurrentRecord(defaultRecord); + }, 300); + }, [defaultRecord]); + + /** + * Toggle modal visibility (legacy support for handleModalVisible pattern) + */ + const handleModalVisible = useCallback( + (visible, method, record) => { + if (visible) { + setModalMethod(method || 'create'); + setCurrentRecord(record || defaultRecord); + } + setModalVisible(!!visible); + }, + [defaultRecord] + ); + + return { + // State + modalVisible, + modalMethod, + currentRecord, + + // Setters + setModalVisible, + setModalMethod, + setCurrentRecord, + + // Handlers + openCreateModal, + openUpdateModal, + closeModal, + handleModalVisible, // Legacy support + }; +} + 
+export default useModalForm; diff --git a/src/dashboard/src/hooks/useTableManagement.js b/src/dashboard/src/hooks/useTableManagement.js new file mode 100644 index 000000000..441f09da1 --- /dev/null +++ b/src/dashboard/src/hooks/useTableManagement.js @@ -0,0 +1,116 @@ +/* + SPDX-License-Identifier: Apache-2.0 +*/ +import { useState, useCallback } from 'react'; + +/** + * Hook for managing table state (selected rows, pagination, filters) + * + * @param {Object} options - Configuration options + * @param {Function} options.dispatch - Dva dispatch function + * @param {string} options.listAction - The action type to dispatch for listing (e.g., 'agent/listAgent') + * @returns {Object} Table management state and handlers + * + * @example + * const { + * selectedRows, + * formValues, + * handleSelectRows, + * handleTableChange, + * handleFormReset, + * setFormValues, + * } = useTableManagement({ + * dispatch, + * listAction: 'agent/listAgent', + * }); + */ +export function useTableManagement({ dispatch, listAction }) { + const [selectedRows, setSelectedRows] = useState([]); + const [formValues, setFormValues] = useState({}); + + /** + * Handle row selection in table + */ + const handleSelectRows = useCallback(rows => { + setSelectedRows(rows); + }, []); + + /** + * Clear selected rows + */ + const clearSelectedRows = useCallback(() => { + setSelectedRows([]); + }, []); + + /** + * Handle table pagination/sorting/filtering changes + */ + const handleTableChange = useCallback( + (pagination, filters = {}, sorter = {}) => { + const { current, pageSize } = pagination; + const params = { + page: current, + per_page: pageSize, + ...formValues, + ...filters, + }; + + // Add sorting if present + if (sorter.field) { + params.sortField = sorter.field; + params.sortOrder = sorter.order; + } + + dispatch({ + type: listAction, + payload: params, + }); + }, + [dispatch, listAction, formValues] + ); + + /** + * Reset form filters and refresh list + */ + const handleFormReset = 
useCallback(() => { + setFormValues({}); + dispatch({ + type: listAction, + }); + }, [dispatch, listAction]); + + /** + * Refresh list with current filters + */ + const refreshList = useCallback( + (extraParams = {}) => { + dispatch({ + type: listAction, + payload: { + ...formValues, + ...extraParams, + }, + }); + }, + [dispatch, listAction, formValues] + ); + + return { + // State + selectedRows, + formValues, + + // Setters + setSelectedRows, + setFormValues, + + // Handlers + handleSelectRows, + handleTableChange, + handleFormReset, + clearSelectedRows, + refreshList, + }; +} + +export default useTableManagement; diff --git a/src/dashboard/src/layouts/SecurityLayout.jsx b/src/dashboard/src/layouts/SecurityLayout.jsx index 7676f4ff5..1d9998cc4 100644 --- a/src/dashboard/src/layouts/SecurityLayout.jsx +++ b/src/dashboard/src/layouts/SecurityLayout.jsx @@ -23,14 +23,11 @@ class SecurityLayout extends React.Component { render() { const { isReady } = this.state; - const { children, loading, currentUser } = this.props; // You can replace it to your authentication rule (such as check token exists) - // 你可以把它替换成你自己的登录认证规则(比如判断 token 是否存在) - - const isLogin = currentUser && currentUser.username; + const { children, loading, currentUser } = this.props; + const isLogin = currentUser && currentUser.id; const queryString = stringify({ redirect: window.location.href, }); - if ((!isLogin && loading) || !isReady) { return ; } diff --git a/src/dashboard/src/layouts/UserLayout.less b/src/dashboard/src/layouts/UserLayout.less index ba3d3235f..ac4fbe51d 100644 --- a/src/dashboard/src/layouts/UserLayout.less +++ b/src/dashboard/src/layouts/UserLayout.less @@ -69,3 +69,7 @@ color: @text-color-secondary; font-size: @font-size-base; } + +:global(.anticon) { + margin-right: 8px; +} diff --git a/src/dashboard/src/locales/en-US.js b/src/dashboard/src/locales/en-US.js index d35032a56..6339d3520 100755 --- a/src/dashboard/src/locales/en-US.js +++ b/src/dashboard/src/locales/en-US.js @@ 
-16,6 +16,7 @@ import fabricCa from './en-US/fabric/ca'; import Network from './en-US/Network'; import Channel from './en-US/Channel'; import ChainCode from './en-US/Chaincode'; +import Overview from './en-US/Overview'; export default { 'navBar.lang': 'Languages', @@ -64,4 +65,5 @@ export default { ...Network, ...Channel, ...ChainCode, + ...Overview, }; diff --git a/src/dashboard/src/locales/en-US/Chaincode.js b/src/dashboard/src/locales/en-US/Chaincode.js index a08c83b77..40d33cc6e 100755 --- a/src/dashboard/src/locales/en-US/Chaincode.js +++ b/src/dashboard/src/locales/en-US/Chaincode.js @@ -29,8 +29,8 @@ export default { 'app.chainCode.form.install.fail': 'Install chaincode failed', 'app.chainCode.form.install.success': 'Install chaincode succeed', 'app.chainCode.form.install.header.title': 'Install Chaincode', - 'app.chainCode.form.install.nodes': 'Please select nodes', - 'app.chainCode.form.install.checkNodes': 'Please select nodes', + 'app.chainCode.form.install.peers': 'Peers', + 'app.chainCode.form.install.checkPeers': 'Please select peers', 'app.chainCode.form.approve.fail': 'Approve chaincode failed', 'app.chainCode.form.approve.success': 'Approve chaincode succeed', 'app.chainCode.form.approve.header.title': 'Approve Chaincode', diff --git a/src/dashboard/src/locales/en-US/Overview.js b/src/dashboard/src/locales/en-US/Overview.js new file mode 100644 index 000000000..d4fdbb689 --- /dev/null +++ b/src/dashboard/src/locales/en-US/Overview.js @@ -0,0 +1,4 @@ +export default { + "overview.title": "User Overview", + "overview.welcome.message": "Welcome!" 
+} diff --git a/src/dashboard/src/locales/en-US/menu.js b/src/dashboard/src/locales/en-US/menu.js index 55ad32dd4..025dd6e97 100755 --- a/src/dashboard/src/locales/en-US/menu.js +++ b/src/dashboard/src/locales/en-US/menu.js @@ -27,4 +27,5 @@ export default { 'menu.account.settings': 'Account Settings', 'menu.account.trigger': 'Trigger Error', 'menu.account.logout': 'Logout', + 'menu.docs': 'Online Documentation', }; diff --git a/src/dashboard/src/locales/zh-CN.js b/src/dashboard/src/locales/zh-CN.js index 4df9f6659..be2e3f8a5 100755 --- a/src/dashboard/src/locales/zh-CN.js +++ b/src/dashboard/src/locales/zh-CN.js @@ -16,6 +16,7 @@ import fabricCa from './zh-CN/fabric/ca'; import Network from './zh-CN/Network'; import Channel from './zh-CN/Channel'; import ChainCode from './zh-CN/Chaincode'; +import Overview from './zh-CN/Overview'; export default { 'navBar.lang': '语言', @@ -64,4 +65,5 @@ export default { ...Network, ...Channel, ...ChainCode, + ...Overview, }; diff --git a/src/dashboard/src/locales/zh-CN/Chaincode.js b/src/dashboard/src/locales/zh-CN/Chaincode.js index 53e5bb905..1fa983bba 100755 --- a/src/dashboard/src/locales/zh-CN/Chaincode.js +++ b/src/dashboard/src/locales/zh-CN/Chaincode.js @@ -29,8 +29,8 @@ export default { 'app.chainCode.form.install.fail': '安装链码失败', 'app.chainCode.form.install.success': '安装链码成功', 'app.chainCode.form.install.header.title': '安装链码', - 'app.chainCode.form.install.nodes': '请选择节点', - 'app.chainCode.form.install.checkNodes': '请选择节点', + 'app.chainCode.form.install.peers': '节点', + 'app.chainCode.form.install.checkPeers': '请选择节点', 'app.chainCode.form.approve.fail': '批准链码失败', 'app.chainCode.form.approve.success': '批准链码成功', 'app.chainCode.form.approve.header.title': '批准链码', diff --git a/src/dashboard/src/locales/zh-CN/Overview.js b/src/dashboard/src/locales/zh-CN/Overview.js new file mode 100644 index 000000000..603fa50cf --- /dev/null +++ b/src/dashboard/src/locales/zh-CN/Overview.js @@ -0,0 +1,4 @@ +export default { + 
"overview.title": "用户总览", + "overview.welcome.message": "欢迎!" +} diff --git a/src/dashboard/src/locales/zh-CN/menu.js b/src/dashboard/src/locales/zh-CN/menu.js index 5b36a11de..7a4df1005 100755 --- a/src/dashboard/src/locales/zh-CN/menu.js +++ b/src/dashboard/src/locales/zh-CN/menu.js @@ -27,4 +27,5 @@ export default { 'menu.account.settings': '个人设置', 'menu.account.trigger': '触发报错', 'menu.account.logout': '退出登录', + 'menu.docs': '线上文档', }; diff --git a/src/dashboard/src/models/agent.js b/src/dashboard/src/models/agent.js index 6f36b4cd4..04dab0896 100755 --- a/src/dashboard/src/models/agent.js +++ b/src/dashboard/src/models/agent.js @@ -1,3 +1,6 @@ +/* + SPDX-License-Identifier: Apache-2.0 +*/ import { listAgent, getAgent, @@ -7,74 +10,38 @@ import { applyAgent, releaseAgent, } from '@/services/agent'; +import { createModel, createListEffect, createSimpleEffect } from '@/utils/modelFactory'; -export default { +export default createModel({ namespace: 'agent', state: { agent: {}, agents: [], - pagination: { - total: 0, - current: 1, - pageSize: 10, - }, currentAgent: {}, }, effects: { - *listAgent({ payload, callback }, { call, put, select }) { - const response = yield call(listAgent, payload); - const pagination = yield select(state => state.agent.pagination); - const pageSize = payload ? payload.per_page || pagination.pageSize : pagination.pageSize; - const current = payload ? 
payload.page || pagination.current : pagination.current; + listAgent: createListEffect({ + service: listAgent, + namespace: 'agent', + dataKey: 'agents', + }), - pagination.total = response.total; - pagination.pageSize = pageSize; - pagination.current = current; - yield put({ - type: 'save', - payload: { - pagination, - agents: response.data.data, - }, - }); - if (callback) { - callback(); - } - }, - *getAgent({ payload, callback }, { call, put }) { - const response = yield call(getAgent, payload); - yield put({ - type: 'save', - payload: { - agent: response, - }, - }); - if (callback) { - callback({ - ...response, - }); - } - }, - *createAgent({ payload, callback }, { call }) { - const response = yield call(createAgent, payload.formData); - if (callback) { - callback({ - payload, - ...response, - }); - } - }, - *applyAgent({ payload, callback }, { call }) { - const response = yield call(applyAgent, payload.data); - if (callback) { - callback({ - payload, - ...response, - }); - } - }, + getAgent: createSimpleEffect(getAgent, { + saveKey: 'agent', + includePayloadInCallback: false, + }), + + createAgent: createSimpleEffect(createAgent, { + getServiceParams: payload => payload.formData, + }), + + applyAgent: createSimpleEffect(applyAgent, { + getServiceParams: payload => payload.data, + }), + + // Custom effect to include action in callback (original behavior) *updateAgent({ payload, callback }, { call }) { const response = yield call(updateAgent, payload.data); if (callback) { @@ -84,43 +51,9 @@ export default { }); } }, - *deleteAgent({ payload, callback }, { call }) { - const response = yield call(deleteAgent, payload); - if (callback) { - callback({ - payload, - ...response, - }); - } - }, - *releaseAgent({ payload, callback }, { call }) { - const response = yield call(releaseAgent, payload); - if (callback) { - callback({ - payload, - ...response, - }); - } - }, - }, - reducers: { - save(state, { payload }) { - return { - ...state, - ...payload, - }; - }, - 
clear() { - return { - agent: {}, - agents: [], - pagination: { - total: 0, - current: 1, - pageSize: 10, - }, - currentAgent: {}, - }; - }, + + deleteAgent: createSimpleEffect(deleteAgent), + + releaseAgent: createSimpleEffect(releaseAgent), }, -}; +}); diff --git a/src/dashboard/src/models/chaincode.js b/src/dashboard/src/models/chaincode.js index 0f8f00818..d63461dba 100644 --- a/src/dashboard/src/models/chaincode.js +++ b/src/dashboard/src/models/chaincode.js @@ -1,82 +1,50 @@ +/* + SPDX-License-Identifier: Apache-2.0 +*/ import { listChainCode, + createChainCode, uploadChainCode, installChainCode, approveChainCode, commitChainCode, } from '@/services/chaincode'; +import { createModel, createListEffect, createSimpleEffect } from '@/utils/modelFactory'; -export default { +export default createModel({ namespace: 'chainCode', state: { chainCodes: [], - pagination: { - total: 0, - current: 1, - pageSize: 10, - }, }, effects: { - *listChainCode({ payload }, { call, put, select }) { - const response = yield call(listChainCode, payload); - const pagination = yield select(state => state.chainCode.pagination); - const pageSize = payload ? payload.per_page || pagination.pageSize : pagination.pageSize; - const current = payload ? 
payload.page || pagination.current : pagination.current; + listChainCode: createListEffect({ + service: listChainCode, + namespace: 'chainCode', + dataKey: 'chainCodes', + // ChainCode API returns total in response.data.total instead of response.total + getTotalFromResponse: response => response.data.total, + }), - pagination.total = response.data.total; - pagination.pageSize = pageSize; - pagination.current = current; - yield put({ - type: 'save', - payload: { - pagination, - chainCodes: response.data.data, - }, - }); - }, - *uploadChainCode({ payload, callback }, { call }) { - const response = yield call(uploadChainCode, payload); - if (callback) { - callback(response); - } - }, - *installChainCode({ payload, callback }, { call }) { - const response = yield call(installChainCode, payload); - if (callback) { - callback(response); - } - }, - *approveChainCode({ payload, callback }, { call }) { - const response = yield call(approveChainCode, payload); - if (callback) { - callback(response); - } - }, - *commitChainCode({ payload, callback }, { call }) { - const response = yield call(commitChainCode, payload); - if (callback) { - callback(response); - } - }, - }, - reducers: { - save(state, { payload }) { - return { - ...state, - ...payload, - }; - }, - clear() { - return { - chainCodes: [], - pagination: { - total: 0, - current: 1, - pageSize: 10, - }, - }; - }, + createChainCode: createSimpleEffect(createChainCode, { + includePayloadInCallback: false, + }), + + uploadChainCode: createSimpleEffect(uploadChainCode, { + includePayloadInCallback: false, + }), + + installChainCode: createSimpleEffect(installChainCode, { + includePayloadInCallback: false, + }), + + approveChainCode: createSimpleEffect(approveChainCode, { + includePayloadInCallback: false, + }), + + commitChainCode: createSimpleEffect(commitChainCode, { + includePayloadInCallback: false, + }), }, -}; +}); diff --git a/src/dashboard/src/models/channel.js b/src/dashboard/src/models/channel.js index 
8de345097..43902ff35 100644 --- a/src/dashboard/src/models/channel.js +++ b/src/dashboard/src/models/channel.js @@ -1,48 +1,33 @@ +/* + SPDX-License-Identifier: Apache-2.0 +*/ import { listChannel, createChannel, getNodeConfig, updateChannelConfig } from '@/services/channel'; +import { createModel, createListEffect, createSimpleEffect } from '@/utils/modelFactory'; -export default { +export default createModel({ namespace: 'channel', state: { channels: [], - pagination: { - total: 0, - current: 1, - pageSize: 10, - }, currentChannel: {}, }, effects: { - *listChannel({ payload }, { call, put, select }) { - const response = yield call(listChannel, payload); - const pagination = yield select(state => state.channel.pagination); - const pageSize = payload ? payload.per_page || pagination.pageSize : pagination.pageSize; - const current = payload ? payload.page || pagination.current : pagination.current; + listChannel: createListEffect({ + service: listChannel, + namespace: 'channel', + dataKey: 'channels', + }), - pagination.total = response.data.total; - pagination.pageSize = pageSize; - pagination.current = current; - yield put({ - type: 'save', - payload: { - pagination, - channels: response.data.data, - }, - }); - }, - *createChannel({ payload, callback }, { call }) { - const response = yield call(createChannel, payload); - if (callback) { - callback(response); - } - }, - *getNodeConfig({ payload, callback }, { call }) { - const response = yield call(getNodeConfig, payload); - if (callback) { - callback(response); - } - }, + createChannel: createSimpleEffect(createChannel, { + includePayloadInCallback: false, + }), + + getNodeConfig: createSimpleEffect(getNodeConfig, { + includePayloadInCallback: false, + }), + + // Custom effect for updateChannel with special parameter structure *updateChannel({ id, payload, callback }, { call }) { const response = yield call(updateChannelConfig, id, payload); if (callback) { @@ -50,23 +35,4 @@ export default { } }, }, - reducers: { 
- save(state, { payload }) { - return { - ...state, - ...payload, - }; - }, - clear() { - return { - channels: [], - pagination: { - total: 0, - current: 1, - pageSize: 10, - }, - currentChannel: {}, - }; - }, - }, -}; +}); diff --git a/src/dashboard/src/models/login.js b/src/dashboard/src/models/login.js index f6fd66d6b..c15e7d4bf 100644 --- a/src/dashboard/src/models/login.js +++ b/src/dashboard/src/models/login.js @@ -1,6 +1,6 @@ import { history } from 'umi'; import { stringify } from 'qs'; -import { fakeAccountLogin, register } from '@/services/api'; +import { login, register } from '@/services/api'; import { setAuthority } from '@/utils/authority'; import { getPageQuery } from '@/utils/utils'; import { reloadAuthorized } from '@/utils/Authorized'; @@ -18,7 +18,7 @@ export default { effects: { *login({ payload }, { call, put }) { - const response = yield call(fakeAccountLogin, payload); + const response = yield call(login, payload); // Login successfully if (response.data.token) { const { user, token } = response.data; @@ -52,11 +52,12 @@ export default { *register({ payload }, { call, put }) { const response = yield call(register, payload); + const isSuccessful = response.status.toLowerCase() === 'successful'; yield put({ type: 'changeRegisterStatus', payload: { - success: response.status === 'successful', - msg: response.status === 'successful' ? 'Register successfully!' : response.msg, + success: isSuccessful, + msg: isSuccessful ? 'Register successfully!' 
: response.msg, }, }); }, diff --git a/src/dashboard/src/models/menu.js b/src/dashboard/src/models/menu.js index 423b35e79..f3fe370ad 100644 --- a/src/dashboard/src/models/menu.js +++ b/src/dashboard/src/models/menu.js @@ -85,6 +85,9 @@ const getBreadcrumbNameMap = menuData => { const flattenMenuData = data => { data.forEach(menuItem => { + if (menuItem.isExternal) { + return; + } if (menuItem.children) { flattenMenuData(menuItem.children); } diff --git a/src/dashboard/src/models/network.js b/src/dashboard/src/models/network.js index fbb6b5e78..d5dfd3265 100644 --- a/src/dashboard/src/models/network.js +++ b/src/dashboard/src/models/network.js @@ -1,74 +1,28 @@ +/* + SPDX-License-Identifier: Apache-2.0 +*/ import { listNetwork, createNetwork, deleteNetwork } from '@/services/network'; +import { createModel, createListEffect, createSimpleEffect } from '@/utils/modelFactory'; -export default { +export default createModel({ namespace: 'network', state: { networks: [], - pagination: { - total: 0, - current: 1, - pageSize: 10, - }, currentNetwork: {}, }, effects: { - *listNetwork({ payload, callback }, { call, put, select }) { - const response = yield call(listNetwork, payload); - const pagination = yield select(state => state.network.pagination); - const pageSize = payload ? payload.per_page || pagination.pageSize : pagination.pageSize; - const current = payload ? 
payload.page || pagination.current : pagination.current; + listNetwork: createListEffect({ + service: listNetwork, + namespace: 'network', + dataKey: 'networks', + }), - pagination.total = response.total; - pagination.pageSize = pageSize; - pagination.current = current; - yield put({ - type: 'save', - payload: { - pagination, - networks: response.data.data, - }, - }); - if (callback) { - callback(); - } - }, - *createNetwork({ payload, callback }, { call }) { - const response = yield call(createNetwork, payload); - if (callback) { - callback({ - ...response, - }); - } - }, - *deleteNetwork({ payload, callback }, { call }) { - const response = yield call(deleteNetwork, payload); - if (callback) { - callback({ - payload, - ...response, - }); - } - }, - }, - reducers: { - save(state, { payload }) { - return { - ...state, - ...payload, - }; - }, - clear() { - return { - networks: [], - pagination: { - total: 0, - current: 1, - pageSize: 10, - }, - currentNetwork: {}, - }; - }, + createNetwork: createSimpleEffect(createNetwork, { + includePayloadInCallback: false, + }), + + deleteNetwork: createSimpleEffect(deleteNetwork), }, -}; +}); diff --git a/src/dashboard/src/models/node.js b/src/dashboard/src/models/node.js index 81f7d6d1f..4dafa7436 100644 --- a/src/dashboard/src/models/node.js +++ b/src/dashboard/src/models/node.js @@ -1,3 +1,6 @@ +/* + SPDX-License-Identifier: Apache-2.0 +*/ import { listNode, getNode, @@ -9,131 +12,46 @@ import { uploadNodeConfig, nodeJoinChannel, } from '@/services/node'; +import { createModel, createListEffect, createSimpleEffect } from '@/utils/modelFactory'; -export default { +export default createModel({ namespace: 'node', state: { node: {}, nodes: [], - pagination: { - total: 0, - current: 1, - pageSize: 10, - }, }, effects: { - *listNode({ payload, callback }, { call, put, select }) { - const response = yield call(listNode, payload); - const pagination = yield select(state => state.node.pagination); - const pageSize = payload ? 
payload.per_page || pagination.pageSize : pagination.pageSize; - const current = payload ? payload.page || pagination.current : pagination.current; + listNode: createListEffect({ + service: listNode, + namespace: 'node', + dataKey: 'nodes', + }), - pagination.total = response.total; - pagination.pageSize = pageSize; - pagination.current = current; - yield put({ - type: 'save', - payload: { - pagination, - nodes: response.data.data, - }, - }); - if (callback) { - callback(); - } - }, - *createNode({ payload, callback }, { call }) { - const response = yield call(createNode, payload); - if (callback) { - callback({ - ...response, - }); - } - }, - *getNode({ payload, callback }, { call, put }) { - const response = yield call(getNode, payload); - yield put({ - type: 'save', - payload: { - node: response, - }, - }); - if (callback) { - callback({ - ...response, - }); - } - }, - *registerUserToNode({ payload, callback }, { call }) { - const response = yield call(registerUserToNode, payload); - if (callback) { - callback({ - payload, - ...response, - }); - } - }, - *deleteNode({ payload, callback }, { call }) { - const response = yield call(deleteNode, payload); - if (callback) { - callback({ - ...response, - }); - } - }, - *operateNode({ payload, callback }, { call }) { - const response = yield call(operateNode, payload); - if (callback) { - callback({ - payload, - ...response, - }); - } - }, - *downloadNodeConfig({ payload, callback }, { call }) { - const response = yield call(downloadNodeConfig, payload); - if (callback) { - callback(response); - } - }, - *uploadNodeConfig({ payload, callback }, { call }) { - const response = yield call(uploadNodeConfig, payload); - if (callback) { - callback({ - payload, - ...response, - }); - } - }, - *nodeJoinChannel({ payload, callback }, { call }) { - const response = yield call(nodeJoinChannel, payload); - if (callback) { - callback({ - payload, - ...response, - }); - } - }, - }, - reducers: { - save(state, { payload }) { - return 
{ - ...state, - ...payload, - }; - }, - clear() { - return { - node: {}, - nodes: [], - pagination: { - total: 0, - current: 1, - pageSize: 10, - }, - }; - }, + createNode: createSimpleEffect(createNode, { + includePayloadInCallback: false, + }), + + getNode: createSimpleEffect(getNode, { + saveKey: 'node', + includePayloadInCallback: false, + }), + + registerUserToNode: createSimpleEffect(registerUserToNode), + + deleteNode: createSimpleEffect(deleteNode, { + includePayloadInCallback: false, + }), + + operateNode: createSimpleEffect(operateNode), + + downloadNodeConfig: createSimpleEffect(downloadNodeConfig, { + includePayloadInCallback: false, + }), + + uploadNodeConfig: createSimpleEffect(uploadNodeConfig), + + nodeJoinChannel: createSimpleEffect(nodeJoinChannel), }, -}; +}); diff --git a/src/dashboard/src/models/organization.js b/src/dashboard/src/models/organization.js index bb7a7d3fd..b6d0a7106 100644 --- a/src/dashboard/src/models/organization.js +++ b/src/dashboard/src/models/organization.js @@ -1,89 +1,40 @@ +/* + SPDX-License-Identifier: Apache-2.0 +*/ import { + getOrganization, listOrganization, createOrganization, updateOrganization, deleteOrganization, } from '@/services/organization'; +import { createModel, createListEffect, createSimpleEffect } from '@/utils/modelFactory'; -export default { +export default createModel({ namespace: 'organization', state: { organizations: [], - pagination: { - total: 0, - current: 1, - pageSize: 10, - }, currentOrganization: {}, }, effects: { - *listOrganization({ payload, callback }, { call, put, select }) { - const response = yield call(listOrganization, payload); - const pagination = yield select(state => state.organization.pagination); - const pageSize = payload ? payload.per_page || pagination.pageSize : pagination.pageSize; - const current = payload ? 
payload.page || pagination.current : pagination.current; + listOrganization: createListEffect({ + service: listOrganization, + namespace: 'organization', + dataKey: 'organizations', + }), - pagination.total = response.total; - pagination.pageSize = pageSize; - pagination.current = current; - yield put({ - type: 'save', - payload: { - pagination, - organizations: response.data.data, - }, - }); - if (callback) { - callback(); - } - }, - *createOrganization({ payload, callback }, { call }) { - const response = yield call(createOrganization, payload); - if (callback) { - callback({ - payload, - ...response, - }); - } - }, - *updateOrganization({ payload, callback }, { call }) { - const response = yield call(updateOrganization, payload); - if (callback) { - callback({ - payload, - ...response, - }); - } - }, - *deleteOrganization({ payload, callback }, { call }) { - const response = yield call(deleteOrganization, payload.id); - if (callback) { - callback({ - payload, - ...response, - }); - } - }, - }, - reducers: { - save(state, { payload }) { - return { - ...state, - ...payload, - }; - }, - clear() { - return { - organizations: [], - pagination: { - total: 0, - current: 1, - pageSize: 10, - }, - currentOrganization: {}, - }; - }, + getOrganization: createSimpleEffect(getOrganization, { + includePayloadInCallback: false, + }), + + createOrganization: createSimpleEffect(createOrganization), + + updateOrganization: createSimpleEffect(updateOrganization), + + deleteOrganization: createSimpleEffect(deleteOrganization, { + getServiceParams: payload => payload.id, + }), }, -}; +}); diff --git a/src/dashboard/src/pages/Agent/Agent.js b/src/dashboard/src/pages/Agent/Agent.js index c4af089e3..0a1772a0f 100755 --- a/src/dashboard/src/pages/Agent/Agent.js +++ b/src/dashboard/src/pages/Agent/Agent.js @@ -1,10 +1,11 @@ -import React, { PureComponent } from 'react'; -import { connect, useIntl, injectIntl } from 'umi'; +import React, { useCallback, useEffect } from 'react'; +import { 
connect, useIntl } from 'umi'; import { Card, Button, message, List, Badge, Row, Col, Modal, Form, Select, Input } from 'antd'; import { PlusOutlined, DesktopOutlined } from '@ant-design/icons'; import moment from 'moment'; import PageHeaderWrapper from '@/components/PageHeaderWrapper'; import { getAuthority } from '@/utils/authority'; +import { useDeleteConfirm, useModalForm, useTableManagement } from '@/hooks'; import styles from '../styles.less'; const FormItem = Form.Item; @@ -128,351 +129,239 @@ const ApplyAgentForm = props => { ); }; -@connect(({ agent, organization, user, loading }) => ({ - agent, - organization, - user, - loadingAgents: loading.effects['agent/listAgent'], - applyingAgent: loading.effects['agent/applyAgent'], -})) -class Agent extends PureComponent { - state = { - modalVisible: false, - action: 'create', - agentData: {}, - }; - - componentDidMount() { - this.queryAgentList(); - } - - componentWillUnmount() { - const { dispatch } = this.props; - - dispatch({ - type: 'agent/clear', - }); - } +const Agent = ({ dispatch, agent = {}, loadingAgents, applyingAgent }) => { + const intl = useIntl(); + const { agents = [], pagination = {} } = agent; + const pageSize = pagination.pageSize || 10; + const currentPage = pagination.current || 1; + const userRole = getAuthority()[0]; - queryAgentList = () => { - const { - dispatch, - agent: { pagination }, - } = this.props; - const userRole = getAuthority()[0]; + const { refreshList } = useTableManagement({ + dispatch, + listAction: 'agent/listAgent', + }); + const { showDeleteConfirm } = useDeleteConfirm({ dispatch, intl }); + const { + modalVisible, + modalMethod, + currentRecord: agentData, + openCreateModal, + openUpdateModal, + closeModal, + handleModalVisible, + } = useModalForm(); - dispatch({ - type: 'agent/listAgent', - payload: { - per_page: pagination.pageSize, - page: pagination.current, - }, - }); - if (userRole === 'admin') { - dispatch({ - type: 'organization/listOrganization', + const 
queryAgentList = useCallback( + (page = currentPage, perPage = pageSize) => { + refreshList({ + page, + per_page: perPage, }); - } - }; - - submitCallback = response => { - if (response.status === 'successful') { - const { intl } = this.props; - if (response.action === 'create') { - message.success( - intl.formatMessage({ - id: 'app.applyAgent.success', - defaultMessage: 'Successful application for agent.', - }) - ); - } else { - message.success( - intl.formatMessage({ - id: 'app.updateAgent.success', - defaultMessage: 'Successful application for agent.', - }) - ); + if (userRole === 'admin') { + dispatch({ type: 'organization/listOrganization' }); } - this.queryAgentList(); - this.handleModalVisible(); - } - }; + }, + [dispatch, refreshList, userRole, currentPage, pageSize] + ); - handleModalVisible = (visible, action, data) => { - this.setState({ - modalVisible: !!visible, - action: action || 'create', - agentData: data || {}, - }); - }; + useEffect(() => { + queryAgentList(); + return () => { + dispatch({ type: 'agent/clear' }); + }; + }, [dispatch, queryAgentList]); - handleSubmit = (values, action) => { - const { dispatch } = this.props; - if (action === 'create') { - dispatch({ - type: 'agent/applyAgent', - payload: { data: values, action }, - callback: this.submitCallback, - }); - } else { + const submitCallback = useCallback( + response => { + if (response.status === 'successful') { + if (response.action === 'create') { + message.success( + intl.formatMessage({ + id: 'app.applyAgent.success', + defaultMessage: 'Successful application for agent.', + }) + ); + } else { + message.success( + intl.formatMessage({ + id: 'app.updateAgent.success', + defaultMessage: 'Successful application for agent.', + }) + ); + } + queryAgentList(); + closeModal(); + } + }, + [closeModal, intl, queryAgentList] + ); + + const handleSubmit = useCallback( + (values, action) => { + const type = action === 'create' ? 
'agent/applyAgent' : 'agent/updateAgent'; dispatch({ - type: 'agent/updateAgent', + type, payload: { data: values, action }, - callback: this.submitCallback, + callback: submitCallback, }); - } - }; - - onAddAgent = () => { - this.handleModalVisible(true); - }; - - deleteCallback = res => { - const { intl } = this.props; - const userRole = getAuthority()[0]; - if (res.status === 'successful') { - const id = userRole === 'admin' ? 'app.agent.delete.success' : 'app.agent.release.success'; - const defaultMessage = - userRole === 'admin' ? 'Delete agent success.' : 'Release agent success.'; - - message.success( - intl.formatMessage({ - id, - defaultMessage, - }) - ); - this.queryAgentList(); - } else { - const id = userRole === 'admin' ? 'app.agent.delete.fail' : 'app.agent.release.fail'; - const defaultMessage = - userRole === 'admin' ? 'Delete agent failed.' : 'Release agent failed.'; - - message.error( - intl.formatMessage({ - id, - defaultMessage, - }) - ); - } - }; + }, + [dispatch, submitCallback] + ); - handleTableChange = page => { - const { - dispatch, - agent: { pagination }, - } = this.props; - const params = { - page, - per_page: pagination.pageSize, - }; - dispatch({ - type: 'agent/listAgent', - payload: params, - }); - }; + const handlePageChange = useCallback( + (page, perPage) => { + queryAgentList(page, perPage || pageSize); + }, + [queryAgentList, pageSize] + ); - editAgent = agent => { - this.handleModalVisible(true, 'update', agent); - }; + const handleDelete = useCallback( + agentItem => { + const titleId = + userRole === 'admin' ? 'app.agent.form.delete.title' : 'app.agent.form.release.title'; + const contentId = + userRole === 'admin' ? 'app.agent.form.delete.content' : 'app.agent.form.release.content'; + const successId = + userRole === 'admin' ? 'app.agent.delete.success' : 'app.agent.release.success'; + const failId = userRole === 'admin' ? 
'app.agent.delete.fail' : 'app.agent.release.fail'; - // TODO: remove these two comment lines after add the functional code - // eslint-disable-next-line no-unused-vars - nodeList = agent => {}; + showDeleteConfirm({ + record: agentItem, + deleteAction: 'agent/deleteAgent', + titleId, + contentId, + successId, + failId, + onSuccess: () => queryAgentList(), + }); + }, + [queryAgentList, showDeleteConfirm, userRole] + ); - deleteAgent = agent => { - const { dispatch } = this.props; - const userRole = getAuthority()[0]; + const nodeList = useCallback(agentItem => agentItem, []); - if (userRole === 'admin') { - dispatch({ - type: 'agent/deleteAgent', - payload: agent.id, - callback: this.deleteCallback, - }); - } else { - dispatch({ - type: 'agent/deleteAgent', - payload: agent.id, - callback: this.deleteCallback, - }); + const badgeStatus = status => { + let statusOfBadge = 'default'; + switch (status) { + case 'active': + statusOfBadge = 'success'; + break; + case 'inactive': + statusOfBadge = 'error'; + break; + default: + break; } + return statusOfBadge; }; - handleDelete = agent => { - const { intl } = this.props; - const userRole = getAuthority()[0]; - const titleMessageId = - userRole === 'admin' ? 'app.agent.form.delete.title' : 'app.agent.form.release.title'; - const titleDefaultMessage = userRole === 'admin' ? 'Delete Agent' : 'Release Agent'; - const contentMessageId = - userRole === 'admin' ? 'app.agent.form.delete.content' : 'app.agent.form.release.content'; - const contentDefaultMessage = - userRole === 'admin' - ? 'Confirm to delete the agent {name}?' 
- : 'Confirm to release the agent {name}?'; - - Modal.confirm({ - title: intl.formatMessage({ - id: titleMessageId, - defaultMessage: titleDefaultMessage, - }), - content: intl.formatMessage( - { - id: contentMessageId, - defaultMessage: contentDefaultMessage, - }, - { - name: agent.name, - } - ), - okText: intl.formatMessage({ id: 'form.button.confirm', defaultMessage: 'Confirm' }), - cancelText: intl.formatMessage({ id: 'form.button.cancel', defaultMessage: 'Cancel' }), - onOk: () => this.deleteAgent(agent), - }); + const paginationProps = { + showQuickJumper: true, + total: pagination.total, + pageSize, + current: currentPage, + onChange: handlePageChange, }; - render() { - const { - agent: { agents, pagination }, - // eslint-disable-next-line no-unused-vars - organization: { organizations }, - loadingAgents, - applyingAgent, - user: { - // eslint-disable-next-line no-unused-vars - currentUser: { organization = {} }, - }, - intl, - } = this.props; + const ListContent = ({ data: { type, created_at: createdAt, status } }) => ( +
+ + +

{intl.formatMessage({ id: 'app.agent.type', defaultMessage: 'Type' })}

+

{type}

+ + +

+ {intl.formatMessage({ + id: 'app.agent.table.header.creationTime', + defaultMessage: 'Creation Time', + })} +

+

{moment(createdAt).format('YYYY-MM-DD HH:mm:ss')}

+ + + + +
+
+ ); - const { modalVisible, action, agentData } = this.state; - // eslint-disable-next-line no-unused-vars - const userRole = getAuthority()[0]; + const formProps = { + visible: modalVisible, + handleSubmit, + handleModalVisible, + confirmLoading: applyingAgent, + action: modalMethod, + agentData, + }; - function badgeStatus(status) { - let statusOfBadge = 'default'; - switch (status) { - case 'active': - statusOfBadge = 'success'; - break; - case 'inactive': - statusOfBadge = 'error'; - break; - default: - break; + return ( + + + {intl.formatMessage({ + id: 'app.agent.title', + defaultMessage: 'Agent Management', + })} + } - - return statusOfBadge; - } - - const paginationProps = { - showQuickJumper: true, - total: pagination.total, - pageSize: pagination.pageSize, - currentPage: pagination.current, - onChange: this.handleTableChange, - }; - - const ListContent = ({ data: { type, created_at: createdAt, status } }) => ( -
- - -

{intl.formatMessage({ id: 'app.agent.type', defaultMessage: 'Type' })}

-

{type}

- - -

- {intl.formatMessage({ - id: 'app.agent.table.header.creationTime', - defaultMessage: 'Creation Time', - })} -

-

{moment(createdAt).format('YYYY-MM-DD HH:mm:ss')}

- - - - -
-
- ); - - const formProps = { - visible: modalVisible, - handleSubmit: this.handleSubmit, - handleModalVisible: this.handleModalVisible, - confirmLoading: applyingAgent, - action, - agentData, - }; - - return ( - - {} - {intl.formatMessage({ - id: 'app.agent.title', - defaultMessage: 'Agent Management', - })} - - } - > - -
-
- -
- 0 ? paginationProps : false} - dataSource={agents} - renderItem={item => ( - this.editAgent(item)}> - {intl.formatMessage({ - id: 'form.menu.item.update', - defaultMessage: 'Update', - })} - , - this.nodeList(item)}> - {intl.formatMessage({ id: 'menu.node', defaultMessage: 'Node' })} - , - this.handleDelete(item)}> - {intl.formatMessage({ - id: 'form.menu.item.delete', - defaultMessage: 'Delete', - })} - , - ]} - > - {item.name}} - description={ -
-

{item.ip}

-
- } - /> - -
- )} - /> + > + +
+
+
- - - - ); - } -} + 0 ? paginationProps : false} + dataSource={agents} + renderItem={item => ( + openUpdateModal(item)}> + {intl.formatMessage({ + id: 'form.menu.item.update', + defaultMessage: 'Update', + })} + , + nodeList(item)}> + {intl.formatMessage({ id: 'menu.node', defaultMessage: 'Node' })} + , + handleDelete(item)}> + {intl.formatMessage({ + id: 'form.menu.item.delete', + defaultMessage: 'Delete', + })} + , + ]} + > + {item.name}} + description={ +
+

{item.ip}

+
+ } + /> + +
+ )} + /> +
+
+ + + ); +}; -export default injectIntl(Agent); +export default connect(({ agent, loading }) => ({ + agent, + loadingAgents: loading.effects['agent/listAgent'], + applyingAgent: loading.effects['agent/applyAgent'] || loading.effects['agent/updateAgent'], +}))(Agent); diff --git a/src/dashboard/src/pages/ChainCode/ChainCode.js b/src/dashboard/src/pages/ChainCode/ChainCode.js index ec1dbb0db..4c4ff2272 100644 --- a/src/dashboard/src/pages/ChainCode/ChainCode.js +++ b/src/dashboard/src/pages/ChainCode/ChainCode.js @@ -1,8 +1,8 @@ /* SPDX-License-Identifier: Apache-2.0 */ -import React, { PureComponent, Fragment } from 'react'; -import { connect, injectIntl } from 'umi'; +import React, { Fragment, useCallback, useEffect, useMemo, useState } from 'react'; +import { connect, useIntl } from 'umi'; import { Card, Button, Divider, Dropdown, Menu } from 'antd'; import { PlusOutlined, FunctionOutlined, DownOutlined } from '@ant-design/icons'; import PageHeaderWrapper from '@/components/PageHeaderWrapper'; @@ -11,393 +11,382 @@ import UploadForm from '@/pages/ChainCode/forms/UploadForm'; import InstallForm from '@/pages/ChainCode/forms/InstallForm'; import ApproveForm from '@/pages/ChainCode/forms/ApproveForm'; import CommitForm from './forms/CommitForm'; +import { useDeleteConfirm, useTableManagement } from '@/hooks'; import styles from './styles.less'; -@connect(({ chainCode, loading }) => ({ - chainCode, - loadingChainCodes: loading.effects['chainCode/listChainCode'], - uploading: loading.effects['chainCode/uploadChainCode'], - installing: loading.effects['chainCode/installChainCode'], - approving: loading.effects['chainCode/approveChainCode'], - committing: loading.effects['chainCode/commitChainCode'], -})) -class ChainCode extends PureComponent { - state = { - selectedRows: [], - formValues: {}, - newFile: '', - modalVisible: false, - installModalVisible: false, - approveModalVisible: false, - commitModalVisible: false, - chainCodeName: '', - }; - - componentDidMount() { 
- this.fetchChainCodes(); - } - - componentWillUnmount() { - const { dispatch } = this.props; - - dispatch({ - type: 'chainCode/clear', - }); - } - - fetchChainCodes = () => { - const { dispatch } = this.props; - - dispatch({ - type: 'chainCode/listChainCode', - }); - }; - - fetchNodes = () => { - const { dispatch } = this.props; - - dispatch({ - type: 'chainCode/listNode', - }); - }; - - handleTableChange = pagination => { - const { dispatch } = this.props; - const { formValues } = this.state; - const { current, pageSize } = pagination; - const params = { - page: current, - per_page: pageSize, - ...formValues, +const ChainCode = ({ + dispatch, + chainCode = {}, + loadingChainCodes, + uploading, + installing, + approving, + committing, +}) => { + const intl = useIntl(); + const { chainCodes = [], paginations = {}, nodes = {} } = chainCode; + + const { selectedRows, handleSelectRows, handleTableChange, refreshList } = useTableManagement({ + dispatch, + listAction: 'chainCode/listChainCode', + }); + const { showDeleteConfirm } = useDeleteConfirm({ dispatch, intl }); + + const [modalVisible, setModalVisible] = useState(false); + const [installModalVisible, setInstallModalVisible] = useState(false); + const [approveModalVisible, setApproveModalVisible] = useState(false); + const [commitModalVisible, setCommitModalVisible] = useState(false); + const [chainCodeName, setChainCodeName] = useState(''); + const [newFile, setFile] = useState(null); + + useEffect(() => { + dispatch({ type: 'chainCode/listChainCode' }); + return () => { + dispatch({ type: 'chainCode/clear' }); }; - dispatch({ - type: 'chainCode/listChainCode', - payload: params, - }); - }; - - handleModalVisible = visible => { - this.setState({ - modalVisible: !!visible, - }); - }; - - handleInstallModalVisible = (visible, record) => { - if (visible) { - this.fetchNodes(); - this.setState({ - chainCodeName: record.package_id, + }, [dispatch]); + + const fetchChainCodes = useCallback(() => { + refreshList(); + 
}, [refreshList]); + + const fetchNodes = useCallback(() => { + dispatch({ type: 'node/listNode' }); + }, [dispatch]); + + const handleModalVisible = useCallback(visible => { + setModalVisible(!!visible); + }, []); + + const handleInstallModalVisible = useCallback( + (visible, record = {}) => { + if (visible) { + fetchNodes(); + setChainCodeName(record.package_id); + } + setInstallModalVisible(!!visible); + }, + [fetchNodes] + ); + + const handleApproveModalVisible = useCallback(visible => { + setApproveModalVisible(!!visible); + }, []); + + const handleCommitModalVisible = useCallback(visible => { + setCommitModalVisible(!!visible); + }, []); + + const handleInstall = useCallback( + (values, callback) => { + const formData = new FormData(); + Object.keys(values) + .filter(key => !(key === 'description' && !values[key])) + .forEach(key => { + formData.append(key, values[key]); + }); + dispatch({ + type: 'chainCode/installChainCode', + payload: formData, + callback, }); - } - this.setState({ - installModalVisible: !!visible, - }); - }; - - handleApproveModalVisible = visible => { - this.setState({ - approveModalVisible: !!visible, - }); - }; - - handleCommitModalVisible = visible => { - this.setState({ - commitModalVisible: !!visible, - }); - }; - - handleInstall = (values, callback) => { - const { dispatch } = this.props; - const formData = new FormData(); - - Object.keys(values) - .filter(key => !(key === 'description' && !values[key])) // filter out empty description - .forEach(key => { - formData.append(key, values[key]); + }, + [dispatch] + ); + + const handleApprove = useCallback( + (values, callback) => { + const payload = { + channel_name: values.channel, + chaincode_name: values.name, + chaincode_version: values.version, + sequence: parseInt(values.sequence, 10), + policy: values.policy, + init_flag: !!values.initFlag, + }; + dispatch({ + type: 'chainCode/approveChainCode', + payload, + callback, }); - - dispatch({ - type: 'chainCode/installChainCode', - 
payload: formData, - callback, - }); - }; - - handleApprove = (values, callback) => { - const { dispatch } = this.props; - - const payload = { - channel_name: values.channel, - chaincode_name: values.name, - chaincode_version: values.version, - sequence: parseInt(values.sequence, 10), - policy: values.policy, - init_flag: !!values.initFlag, - }; - - dispatch({ - type: 'chainCode/approveChainCode', - payload, - callback, - }); - }; - - handleCommit = (values, callback) => { - const { dispatch } = this.props; - - const payload = { - channel_name: values.channel, - chaincode_name: values.name, - chaincode_version: values.version, - sequence: parseInt(values.sequence, 10), - policy: values.policy, - init_flag: !!values.initFlag, - }; - - dispatch({ - type: 'chainCode/commitChainCode', - payload, - callback, - }); - }; - - handleUpload = (values, callback) => { - const { dispatch } = this.props; - const formData = new FormData(); - - Object.keys(values) - .filter(key => !(key === 'description' && !values[key])) // filter out empty description - .forEach(key => { + }, + [dispatch] + ); + + const handleCommit = useCallback( + (values, callback) => { + const payload = { + channel_name: values.channel, + chaincode_name: values.name, + chaincode_version: values.version, + sequence: parseInt(values.sequence, 10), + policy: values.policy, + init_flag: !!values.initFlag, + }; + dispatch({ + type: 'chainCode/commitChainCode', + payload, + callback, + }); + }, + [dispatch] + ); + + const handleUpload = useCallback( + (values, callback) => { + const formData = new FormData(); + Object.keys(values).forEach(key => { formData.append(key, values[key]); }); + dispatch({ + type: 'chainCode/createChainCode', + payload: formData, + callback, + }); + }, + [dispatch] + ); + + const onUploadChainCode = useCallback(() => { + handleModalVisible(true); + }, [handleModalVisible]); + + const handleDeleteChaincode = useCallback( + record => { + showDeleteConfirm({ + record, + deleteAction: 
'chainCode/deleteChainCode', + titleId: 'app.chainCode.table.operate.delete', + contentId: 'app.chainCode.table.operate.delete', + successId: 'app.chainCode.delete.success', + failId: 'app.chainCode.delete.fail', + getPayload: r => r.id, + onSuccess: () => refreshList(), + }); + }, + [refreshList, showDeleteConfirm] + ); - dispatch({ - type: 'chainCode/uploadChainCode', - payload: formData, - callback, - }); - }; - - onUploadChainCode = () => { - this.handleModalVisible(true); - }; - - setFile = file => { - this.setState({ newFile: file }); - }; - - handleSelectRows = rows => { - this.setState({ - selectedRows: rows, - }); - }; - - render() { - const { - selectedRows, - modalVisible, - newFile, - installModalVisible, - approveModalVisible, - commitModalVisible, - chainCodeName, - } = this.state; - const { - chainCode: { chainCodes, paginations, nodes }, - loadingChainCodes, - intl, - uploading, - installing, - approving, - committing, - } = this.props; - - const uploadFormProps = { + const uploadFormProps = useMemo( + () => ({ modalVisible, - handleUpload: this.handleUpload, - handleModalVisible: this.handleModalVisible, - fetchChainCodes: this.fetchChainCodes, + handleUpload, + handleModalVisible, + fetchChainCodes, uploading, newFile, - setFile: this.setFile, + setFile, intl, - }; + }), + [modalVisible, handleUpload, handleModalVisible, fetchChainCodes, uploading, newFile, intl] + ); - const installFormProps = { + const installFormProps = useMemo( + () => ({ installModalVisible, - handleInstallModalVisible: this.handleInstallModalVisible, - fetchChainCodes: this.fetchChainCodes, - handleInstall: this.handleInstall, + handleInstallModalVisible, + fetchChainCodes, + handleInstall, installing, chainCodeName, nodes, intl, - }; + }), + [ + installModalVisible, + handleInstallModalVisible, + fetchChainCodes, + handleInstall, + installing, + chainCodeName, + nodes, + intl, + ] + ); - const approveFormProps = { + const approveFormProps = useMemo( + () => ({ 
approveModalVisible, - handleApproveModalVisible: this.handleApproveModalVisible, - fetchChainCodes: this.fetchChainCodes, - handleApprove: this.handleApprove, + handleApproveModalVisible, + fetchChainCodes, + handleApprove, approving, selectedRows: [], initFlagChange: e => { - // this can be used to handle the initFlag change, currently only for demo + // 保留原本示範行為 + // eslint-disable-next-line no-console console.log('initFlag changed:', e.target.checked); }, intl, - }; + }), + [ + approveModalVisible, + handleApproveModalVisible, + fetchChainCodes, + handleApprove, + approving, + intl, + ] + ); - const commitFormProps = { + const commitFormProps = useMemo( + () => ({ commitModalVisible, - handleCommitModalVisible: this.handleCommitModalVisible, - handleCommit: this.handleCommit, - fetchChainCodes: this.fetchChainCodes, + handleCommitModalVisible, + handleCommit, + fetchChainCodes, committing, intl, - }; - - const menu = record => ( - - - { - this.handleDeleteChaincode(record); - }} - > + }), + [commitModalVisible, handleCommitModalVisible, handleCommit, fetchChainCodes, committing, intl] + ); + + const menu = record => ( + + + { + handleDeleteChaincode(record); + }} + > + {intl.formatMessage({ + id: 'app.chainCode.table.operate.delete', + defaultMessage: 'Delete', + })} + + + + ); + + const MoreBtn = record => ( + + + {intl.formatMessage({ + id: 'app.node.table.operation.more', + defaultMessage: 'More', + })}{' '} + + + + ); + + const columns = [ + { + title: intl.formatMessage({ + id: 'app.chainCode.table.header.packageID', + defaultMessage: 'PackageID', + }), + dataIndex: 'package_id', + ellipsis: true, + }, + { + title: intl.formatMessage({ + id: 'app.chainCode.table.header.version', + defaultMessage: 'Version', + }), + dataIndex: 'version', + }, + { + title: intl.formatMessage({ + id: 'app.chainCode.table.header.language', + defaultMessage: 'Chaincode Language', + }), + dataIndex: 'language', + }, + { + title: intl.formatMessage({ + id: 
'app.chainCode.table.header.description', + defaultMessage: 'Description', + }), + dataIndex: 'description', + }, + { + title: intl.formatMessage({ + id: 'form.table.header.operation', + defaultMessage: 'Operation', + }), + render: (text, record) => ( + + handleInstallModalVisible(true, record)}> {intl.formatMessage({ - id: 'app.chainCode.table.operate.delete', - defaultMessage: 'Delete', + id: 'app.chainCode.table.operate.install', + defaultMessage: 'Install', })} - - - ); - - const MoreBtn = () => ( - - - {intl.formatMessage({ - id: 'app.node.table.operation.more', - defaultMessage: 'More', - })}{' '} - - - - ); - - const columns = [ - { - title: intl.formatMessage({ - id: 'app.chainCode.table.header.packageID', - defaultMessage: 'PackageID', - }), - dataIndex: 'package_id', - ellipsis: true, - }, - { - title: intl.formatMessage({ - id: 'app.chainCode.table.header.version', - defaultMessage: 'Version', - }), - dataIndex: 'version', - }, - { - title: intl.formatMessage({ - id: 'app.chainCode.table.header.language', - defaultMessage: 'Chaincode Language', - }), - dataIndex: 'language', - }, - { - title: intl.formatMessage({ - id: 'app.chainCode.table.header.description', - defaultMessage: 'Description', - }), - dataIndex: 'description', - }, - { - title: intl.formatMessage({ - id: 'form.table.header.operation', - defaultMessage: 'Operation', - }), - // eslint-disable-next-line no-unused-vars - render: (text, record) => ( - - this.handleInstallModalVisible(true, record)}> - {intl.formatMessage({ - id: 'app.chainCode.table.operate.install', - defaultMessage: 'Install', - })} - - - this.handleApproveModalVisible(true)}> - {intl.formatMessage({ - id: 'app.chainCode.table.operate.approve', - defaultMessage: 'Approve', - })} - - - this.handleCommitModalVisible(true)}> - {intl.formatMessage({ - id: 'app.chainCode.table.operate.commit', - defaultMessage: 'Commit', - })} - - - - - ), - }, - ]; - // TODO: remove dummy data after API is connected - const dummyPagination = { - 
total: 0, - current: 1, - pageSize: 10, - }; - return ( - - {} + + handleApproveModalVisible(true)}> {intl.formatMessage({ - id: 'app.chainCode.title', - defaultMessage: 'Chaincode Management', + id: 'app.chainCode.table.operate.approve', + defaultMessage: 'Approve', })} - - } - > - -
-
- -
- -
-
- - - - -
- ); - } -} + + + handleCommitModalVisible(true)}> + {intl.formatMessage({ + id: 'app.chainCode.table.operate.commit', + defaultMessage: 'Commit', + })} + + + + + ), + }, + ]; + + const dummyPagination = { + total: 0, + current: 1, + pageSize: 10, + }; -export default injectIntl(ChainCode); + return ( + + + {intl.formatMessage({ + id: 'app.chainCode.title', + defaultMessage: 'Chaincode Management', + })} + + } + > + +
+
+ +
+ +
+
+ + + + +
+ ); +}; + +export default connect(({ chainCode, loading }) => ({ + chainCode, + loadingChainCodes: loading.effects['chainCode/listChainCode'], + uploading: loading.effects['chainCode/uploadChainCode'], + installing: loading.effects['chainCode/installChainCode'], + approving: loading.effects['chainCode/approveChainCode'], + committing: loading.effects['chainCode/commitChainCode'], +}))(ChainCode); diff --git a/src/dashboard/src/pages/ChainCode/forms/ApproveForm.js b/src/dashboard/src/pages/ChainCode/forms/ApproveForm.js index 3870ac6a8..1121eb8e5 100644 --- a/src/dashboard/src/pages/ChainCode/forms/ApproveForm.js +++ b/src/dashboard/src/pages/ChainCode/forms/ApproveForm.js @@ -22,9 +22,9 @@ const ApproveForm = props => { useEffect(() => { async function fetchData() { const response = await listChannel(); - const newChannels = Object.keys(response.data.data).map(item => ({ - label: response.data.data[item].name, - value: response.data.data[item].name, + const newChannels = Object.keys(response.data).map(item => ({ + label: response.data[item].name, + value: response.data[item].name, })); setChannels(newChannels); } diff --git a/src/dashboard/src/pages/ChainCode/forms/CommitForm.js b/src/dashboard/src/pages/ChainCode/forms/CommitForm.js index b754485e4..426c5afe0 100644 --- a/src/dashboard/src/pages/ChainCode/forms/CommitForm.js +++ b/src/dashboard/src/pages/ChainCode/forms/CommitForm.js @@ -23,9 +23,9 @@ const CommitForm = props => { async function fetchData() { try { const response = await listChannel(); - const newChannels = Object.keys(response.data.data).map(item => ({ - label: response.data.data[item].name, - value: response.data.data[item].name, + const newChannels = Object.keys(response.data).map(item => ({ + label: response.data[item].name, + value: response.data[item].name, })); setChannels(newChannels); } catch (error) { diff --git a/src/dashboard/src/pages/ChainCode/forms/InstallForm.js b/src/dashboard/src/pages/ChainCode/forms/InstallForm.js index 
1227e8243..9f2a4db0d 100644 --- a/src/dashboard/src/pages/ChainCode/forms/InstallForm.js +++ b/src/dashboard/src/pages/ChainCode/forms/InstallForm.js @@ -136,16 +136,16 @@ const InstallForm = props => { { + const { label, closable, onClose } = props; + const onPreventMouseDown = event => { + event.preventDefault(); + event.stopPropagation(); + }; + return ( + + {label} + + ); +}; + const UploadForm = props => { const [form] = Form.useForm(); const intl = useIntl(); + const [nodes, setNodes] = useState(); + const [channels, setChannels] = useState(); const { modalVisible, handleUpload, @@ -19,8 +43,28 @@ const UploadForm = props => { setFile, } = props; + useEffect(() => { + async function fecthData() { + const responseNodes = await listNode(); + const responseChannels = await listChannel(); + const nodeOptions = responseNodes.data.data + .filter(node => node.type.toLowerCase() === 'peer') + .map(node => ({ + label: node.name, + value: node.id, + })); + const channelOptions = responseChannels.data.data.map(channel => ({ + label: channel.name, + value: channel.id, + })); + setNodes(nodeOptions); + setChannels(channelOptions); + } + fecthData(); + }, []); + const uploadCallback = response => { - if (response.status !== 'successful') { + if (response.status.toLowerCase() !== 'successful') { message.error( intl.formatMessage({ id: 'app.chainCode.form.create.fail', @@ -97,7 +141,7 @@ const UploadForm = props => { id: 'app.chainCode.form.create.file', defaultMessage: 'Package', })} - name="file" + name="package" getValueFromEvent={normFile} rules={[ { @@ -120,6 +164,106 @@ const UploadForm = props => { + + + + + + + + + + + + + + + + + + { const { modalVisible, handleCreate, handleModalVisible, nodes, creating, fetchChannels } = props; const createCallback = response => { - if (response.status !== 'successful') { + if (response.status.toLowerCase() !== 'successful') { message.error( intl.formatMessage({ id: 'app.channel.form.create.fail', @@ -62,7 +63,7 @@ const 
CreateChannel = props => { const orderers = []; Object.keys(nodes).forEach(item => { - if (nodes[item].type === 'peer') { + if (nodes[item].type.toLowerCase() === 'peer') { peers.push({ label: nodes[item].name, value: nodes[item].id }); } else { orderers.push({ label: nodes[item].name, value: nodes[item].id }); @@ -133,7 +134,7 @@ const CreateChannel = props => { id: 'app.channel.form.create.orderer', defaultMessage: 'Please select orderer', })} - name="orderers" + name="orderer_ids" rules={[ { required: true, @@ -157,7 +158,7 @@ const CreateChannel = props => { id: 'app.channel.form.create.peer', defaultMessage: 'Peer', })} - name="peers" + name="peer_ids" rules={[ { required: true, @@ -342,255 +343,171 @@ const UpdateChannel = props => { ); }; -@connect(({ channel, node, loading }) => ({ - channel, - node, - loadingChannels: loading.effects['channel/listChannel'], - creating: loading.effects['channel/createChannel'], - updating: loading.effects['channel/updateChannel'], -})) -class Channel extends PureComponent { - state = { - selectedRows: [], - formValues: {}, - modalVisible: false, - updateModalVisible: false, - channelData: {}, - newFile: '', - }; - - componentDidMount() { - this.fetchChannels(); - } - - componentWillUnmount() { - const { dispatch } = this.props; - - dispatch({ - type: 'channel/clear', - }); - } - - fetchChannels = () => { - const { dispatch } = this.props; - - dispatch({ - type: 'channel/listChannel', - }); - - dispatch({ - type: 'node/listNode', - }); - }; - - handleTableChange = pagination => { - const { dispatch } = this.props; - const { formValues } = this.state; - const { current, pageSize } = pagination; - const params = { - page: current, - per_page: pageSize, - ...formValues, - }; - dispatch({ - type: 'channel/listChannel', - payload: params, - }); - }; - - handleModalVisible = visible => { - this.setState({ - modalVisible: !!visible, - }); - }; - - handleUpdateModalVisible = (visible, record) => { - this.setState({ - 
updateModalVisible: !!visible, - channelData: record, - }); - }; - - onUpdateChannel = record => { - this.handleUpdateModalVisible(true, record); - }; - - setFile = file => { - this.setState({ newFile: file }); - }; - - onCreateChannel = () => { - this.handleModalVisible(true); - }; +const Channel = ({ dispatch, channel = {}, node = {}, loadingChannels, creating, updating }) => { + const intl = useIntl(); + const { channels = [], pagination = {} } = channel; + const { nodes = {} } = node; - handleCreate = (values, callback) => { - const { dispatch } = this.props; + const { selectedRows, handleSelectRows, handleTableChange, refreshList } = useTableManagement({ + dispatch, + listAction: 'channel/listChannel', + }); - dispatch({ - type: 'channel/createChannel', - payload: values, - callback, - }); - }; + const [modalVisible, setModalVisible] = useState(false); + const [updateModalVisible, setUpdateModalVisible] = useState(false); + const [channelData, setChannelData] = useState({}); + const [newFile, setFile] = useState(null); - handleDownloadConfig = row => { - const { dispatch } = this.props; - const params = { - id: row.id, + useEffect(() => { + dispatch({ type: 'channel/listChannel' }); + dispatch({ type: 'node/listNode' }); + return () => { + dispatch({ type: 'channel/clear' }); }; - dispatch({ - type: 'channel/getNodeConfig', - payload: params, - callback: this.downloadCallBack, - }); - }; - - downloadCallBack = response => { - const { intl } = this.props; - message.success( - intl.formatMessage({ - id: 'app.channel.download.success', - defaultMessage: 'Download Channel Config File Successful.', - }) - ); - const blob = response.data; - const prettyJSON = JSON.stringify(blob, null, 2); - const link = document.createElement('a'); - link.href = URL.createObjectURL(new Blob([prettyJSON], { type: 'application/json' })); - link.download = 'configs.json'; - document.body.appendChild(link); - link.click(); - URL.revokeObjectURL(link.href); - }; - - handleUpdate = (id, 
values, callback) => { - const { dispatch } = this.props; - const formData = new FormData(); - - Object.keys(values).forEach(key => { - formData.append(key, values[key]); - }); - - dispatch({ - type: 'channel/updateChannel', - id, - payload: formData, - callback, - }); - }; - - handleSelectRows = rows => { - this.setState({ - selectedRows: rows, - }); - }; + }, [dispatch]); + + const fetchChannels = useCallback(() => { + refreshList(); + dispatch({ type: 'node/listNode' }); + }, [dispatch, refreshList]); + + const handleModalVisible = useCallback(visible => { + setModalVisible(!!visible); + }, []); + + const handleUpdateModalVisible = useCallback((visible, record) => { + setUpdateModalVisible(!!visible); + setChannelData(record || {}); + }, []); + + const onCreateChannel = useCallback(() => { + handleModalVisible(true); + }, [handleModalVisible]); + + const handleCreate = useCallback( + (values, callback) => { + dispatch({ + type: 'channel/createChannel', + payload: values, + callback, + }); + }, + [dispatch] + ); - render() { - const { selectedRows, modalVisible, channelData, updateModalVisible, newFile } = this.state; - const { - channel: { channels, pagination }, - node: { nodes }, - loadingChannels, - intl, - creating, - updating, - } = this.props; + const handleUpdate = useCallback( + (id, values, callback) => { + const formData = new FormData(); + Object.keys(values).forEach(key => { + formData.append(key, values[key]); + }); + dispatch({ + type: 'channel/updateChannel', + id, + payload: formData, + callback, + }); + }, + [dispatch] + ); - const formProps = { + const formProps = useMemo( + () => ({ modalVisible, - handleCreate: this.handleCreate, - handleModalVisible: this.handleModalVisible, - fetchChannels: this.fetchChannels, + handleCreate, + handleModalVisible, + fetchChannels, creating, intl, nodes, - }; + }), + [modalVisible, handleCreate, handleModalVisible, fetchChannels, creating, intl, nodes] + ); - const updateFormProps = { + const updateFormProps 
= useMemo( + () => ({ updateModalVisible, - handleUpdate: this.handleUpdate, - handleModalVisible: this.handleUpdateModalVisible, - fetchChannels: this.fetchChannels, + handleUpdate, + handleModalVisible: handleUpdateModalVisible, + fetchChannels, updating, + channelData, + newFile, + setFile, intl, + }), + [ + updateModalVisible, + handleUpdate, + handleUpdateModalVisible, + fetchChannels, + updating, channelData, newFile, - setFile: this.setFile, - }; + intl, + ] + ); - const columns = [ - { - title: intl.formatMessage({ - id: 'app.channel.table.header.name', - defaultMessage: 'Channel Name', - }), - dataIndex: 'name', - }, - { - title: intl.formatMessage({ - id: 'app.channel.table.header.network', - defaultMessage: 'Network', - }), - render: (text, record) => record.network.name, - }, - { - title: intl.formatMessage({ - id: 'form.table.header.operation', - defaultMessage: 'Operation', - }), - // eslint-disable-next-line no-unused-vars - render: (text, record) => ( - - this.onUpdateChannel(record)}> - {intl.formatMessage({ id: 'form.menu.item.update', defaultMessage: 'Update' })} - - - this.handleDownloadConfig(record)}> - {intl.formatMessage({ id: 'form.menu.item.download', defaultMessage: 'Download' })} - - - ), - }, - ]; - return ( - - {} - {intl.formatMessage({ - id: 'app.channel.title', - defaultMessage: 'Channel Management', - })} - - } - > - -
-
- -
- + const columns = [ + { + title: intl.formatMessage({ + id: 'app.channel.table.header.name', + defaultMessage: 'Channel Name', + }), + dataIndex: 'name', + }, + { + title: intl.formatMessage({ + id: 'form.table.header.operation', + defaultMessage: 'Operation', + }), + }, + ]; + + return ( + + + {intl.formatMessage({ + id: 'app.channel.title', + defaultMessage: 'Channel Management', + })} + + } + > + +
+
+
- - - - - ); - } -} + +
+
+ + +
+ ); +}; -export default injectIntl(Channel); +export default connect(({ channel, node, loading }) => ({ + channel, + node, + loadingChannels: loading.effects['channel/listChannel'], + creating: loading.effects['channel/createChannel'], + updating: loading.effects['channel/updateChannel'], +}))(Channel); diff --git a/src/dashboard/src/pages/Network/Network.js b/src/dashboard/src/pages/Network/Network.js deleted file mode 100644 index 690458473..000000000 --- a/src/dashboard/src/pages/Network/Network.js +++ /dev/null @@ -1,197 +0,0 @@ -/* - SPDX-License-Identifier: Apache-2.0 -*/ -import React, { PureComponent, Fragment } from 'react'; -import { connect, injectIntl, history } from 'umi'; -import { Card, Button, Modal, message, Divider } from 'antd'; -import { PlusOutlined, ApartmentOutlined } from '@ant-design/icons'; -import moment from 'moment'; -import PageHeaderWrapper from '@/components/PageHeaderWrapper'; -import StandardTable from '@/components/StandardTable'; -import styles from './styles.less'; - -@connect(({ network, loading }) => ({ - network, - loadingNetworks: loading.effects['network/listNetwork'], -})) -class Network extends PureComponent { - state = { - selectedRows: [], - formValues: {}, - }; - - componentDidMount() { - const { dispatch } = this.props; - - dispatch({ - type: 'network/listNetwork', - }); - } - - componentWillUnmount() { - this.queryNetworkList(); - } - - handleTableChange = pagination => { - const { dispatch } = this.props; - const { formValues } = this.state; - const { current, pageSize } = pagination; - const params = { - page: current, - per_page: pageSize, - ...formValues, - }; - dispatch({ - type: 'network/listNetwork', - payload: params, - }); - }; - - newNetwork = () => { - history.push('/network/newNetwork'); - }; - - queryNetworkList = () => { - const { dispatch } = this.props; - - dispatch({ - type: 'network/listNetwork', - }); - }; - - handleDeleteNetwork = row => { - const { dispatch, intl } = this.props; - const { 
deleteCallBack } = this; - const { id } = row; - - Modal.confirm({ - title: intl.formatMessage({ - id: 'app.network.form.delete.title', - defaultMessage: 'Delete Network', - }), - content: intl.formatMessage( - { - id: 'app.network.form.delete.content', - defaultMessage: 'Confirm to delete network {name}?', - }, - { - name: row.name, - } - ), - okText: intl.formatMessage({ id: 'form.button.confirm', defaultMessage: 'Confirm' }), - cancelText: intl.formatMessage({ id: 'form.button.cancel', defaultMessage: 'Cancel' }), - onOk() { - dispatch({ - type: 'network/deleteNetwork', - payload: id, - callback: deleteCallBack, - }); - }, - }); - }; - - deleteCallBack = response => { - const { intl } = this.props; - if (response.status === 'successful') { - message.success( - intl.formatMessage({ - id: 'app.network.delete.success', - defaultMessage: 'Delete Network success.', - }) - ); - this.queryNetworkList(); - } else { - message.error( - intl.formatMessage({ - id: 'app.network.delete.fail', - defaultMessage: 'Delete Network failed.', - }) - ); - } - }; - - handleSelectRows = rows => { - this.setState({ - selectedRows: rows, - }); - }; - - render() { - const { selectedRows } = this.state; - const { - network: { networks, pagination }, - loadingNetworks, - intl, - } = this.props; - const columns = [ - { - title: intl.formatMessage({ - id: 'app.network.table.header.name', - defaultMessage: 'Network Name', - }), - dataIndex: 'name', - }, - { - title: intl.formatMessage({ - id: 'app.network.table.header.creationTime', - defaultMessage: 'Create Time', - }), - dataIndex: 'created_at', - render: text => {moment(text).format('YYYY-MM-DD HH:mm:ss')}, - }, - { - title: intl.formatMessage({ - id: 'form.table.header.operation', - defaultMessage: 'Operation', - }), - render: (text, record) => ( - - {intl.formatMessage({ id: 'form.menu.item.update', defaultMessage: 'Update' })} - - this.handleDeleteNetwork(record)}> - {intl.formatMessage({ id: 'form.menu.item.delete', defaultMessage: 
'Delete' })} - - - ), - }, - ]; - return ( - - {} - {intl.formatMessage({ - id: 'app.network.title', - defaultMessage: 'Network Management', - })} - - } - > - -
-
- -
- -
-
-
- ); - } -} - -export default injectIntl(Network); diff --git a/src/dashboard/src/pages/Network/newNetwork.js b/src/dashboard/src/pages/Network/newNetwork.js deleted file mode 100755 index 9bd6eab05..000000000 --- a/src/dashboard/src/pages/Network/newNetwork.js +++ /dev/null @@ -1,194 +0,0 @@ -import React from 'react'; -import { Card, Form, Input, Button, Select, message } from 'antd'; -import { compose } from 'redux'; -import { connect, withRouter, history, injectIntl } from 'umi'; -import PageHeaderWrapper from '@/components/PageHeaderWrapper'; -// import { getAuthority } from '@/utils/authority'; - -const FormItem = Form.Item; -const { Option } = Select; - -@connect(({ network, loading }) => ({ - network, - submitting: loading.effects['network/createNetwork'], -})) -class CreateNetwork extends React.Component { - clickCancel = () => { - history.push('/network'); - }; - - submitCallback = data => { - const { intl } = this.props; - if (data.status === 'successful') { - message.success( - intl.formatMessage( - { - id: 'app.newNetwork.success', - defaultMessage: 'Create network {name} success.', - }, - { - name: data.name, - } - ) - ); - history.push('/network'); - } else { - message.error( - intl.formatMessage( - { - id: 'app.newNetwork.fail', - defaultMessage: 'Create network {name} fail.', - }, - { - name: data.name, - } - ) - ); - } - }; - - handleSubmit = values => { - const { dispatch } = this.props; - - dispatch({ - type: 'network/createNetwork', - payload: { - ...values, - }, - callback: this.submitCallback, - }); - }; - - render() { - const { intl } = this.props; - const { submitting } = this.props; - - const formItemLayout = { - labelCol: { - xs: { span: 24 }, - sm: { span: 7 }, - }, - wrapperCol: { - xs: { span: 24 }, - sm: { span: 12 }, - md: { span: 10 }, - }, - }; - - const submitFormLayout = { - wrapperCol: { - xs: { span: 24, offset: 0 }, - sm: { span: 10, offset: 7 }, - }, - }; - const networkConsensus = ['etcdraft']; - const 
networkConsensusOptions = networkConsensus.map(item => ( - - )); - const networkDatabase = ['couchdb', 'leveldb']; - const networkDatabaseOptions = networkDatabase.map(item => ( - - )); - - return ( - - -
- - - - - - - - - - - - - - -
-
-
- ); - } -} - -export default compose(withRouter, injectIntl)(CreateNetwork); diff --git a/src/dashboard/src/pages/Network/styles.less b/src/dashboard/src/pages/Network/styles.less deleted file mode 100755 index d8af7dbdb..000000000 --- a/src/dashboard/src/pages/Network/styles.less +++ /dev/null @@ -1,90 +0,0 @@ -@import '~antd/lib/style/themes/default.less'; -@import '~@/utils/utils.less'; - -.tableList { - .tableListOperator { - margin-bottom: 16px; - button { - margin-right: 8px; - } - } -} - -.newAgentButton { - width: 100%; - margin-bottom: 8px; - background: fixed; - border-color: darkgrey; -} - -.tableListForm { - :global { - .ant-form-item { - display: flex; - margin-right: 0; - margin-bottom: 24px; - > .ant-form-item-label { - width: auto; - padding-right: 8px; - line-height: 32px; - } - .ant-form-item-control { - line-height: 32px; - } - } - .ant-form-item-control-wrapper { - flex: 1; - } - } - .submitButtons { - display: block; - margin-bottom: 24px; - white-space: nowrap; - } -} - -@media screen and (max-width: @screen-lg) { - .tableListForm :global(.ant-form-item) { - margin-right: 24px; - } -} - -@media screen and (max-width: @screen-md) { - .tableListForm :global(.ant-form-item) { - margin-right: 8px; - } -} - -.ListContentRow { - width: 400px; -} - -.ListItemTitle { - color: #1fffff; - font-size: 17px; -} - -.capitalize { - text-transform: capitalize; -} - -.danger { - color: @error-color; -} - -.disabled { - color: @disabled-color-dark; - - &:hover { - cursor: not-allowed; - } -} - -.optional { - color: @text-color-secondary; - font-style: normal; -} - -.auto-complete { - width: 100%; -} diff --git a/src/dashboard/src/pages/Node/index.js b/src/dashboard/src/pages/Node/index.js index 7c01df570..c4bf60491 100644 --- a/src/dashboard/src/pages/Node/index.js +++ b/src/dashboard/src/pages/Node/index.js @@ -1,8 +1,8 @@ /* SPDX-License-Identifier: Apache-2.0 */ -import React, { PureComponent, Fragment } from 'react'; -import { connect, injectIntl, useIntl 
} from 'umi'; +import React, { Fragment, useCallback, useEffect, useMemo, useState } from 'react'; +import { connect, useIntl } from 'umi'; import { Card, Button, @@ -22,19 +22,12 @@ import moment from 'moment'; import PageHeaderWrapper from '@/components/PageHeaderWrapper'; import StandardTable from '@/components/StandardTable'; import { getAuthority } from '@/utils/authority'; +import { useDeleteConfirm, useTableManagement } from '@/hooks'; import styles from '../styles.less'; const FormItem = Form.Item; const { Option } = Select; -// function str2bytes (str) { -// var bytes = new Uint8Array(str.length); -// for (var i=0; i { const { registerUserFormVisible, @@ -190,7 +183,7 @@ const CreateNode = props => { const { createModalVisible, handleCreate, handleModalVisible, creating, queryNodeList } = props; const createCallback = response => { - if (response.status !== 'successful') { + if (response.status.toLowerCase() !== 'successful') { message.error( intl.formatMessage({ id: 'app.node.new.createFail', @@ -230,7 +223,7 @@ const CreateNode = props => { }, }; - const types = ['orderer', 'peer']; + const types = ['ORDERER', 'PEER']; const typeOptions = types.map(item => (