diff --git a/.circleci/README.md b/.circleci/README.md new file mode 100644 index 000000000..0c9fecb34 --- /dev/null +++ b/.circleci/README.md @@ -0,0 +1,38 @@ +# CircleCI pipeline + +The pipeline consists of three phases: + +## Feature branch commit + +This phase contains the following jobs: +### Jobs + +## PR merge into develop + +This phase contains the following jobs: +### Jobs + +## PR merge into master + +This phase contains the following jobs: +### Jobs + +## Environment variables + +| Env Var | Description | Phase(s) | +| --------------------------------- | ------------- | ---------- | +| BUILD_ENV | | | +| CODACY_PROJECT_TOKEN | | | +| DEV_PROJECT | | | +| GITHUB_TOKEN | | | +| GITHUB_USER | | | +| GIT_USER_EMAIL | | | +| GIT_USER_NAME | | | +| GOOGLE_DEV_ENDPOINTS_CREDENTIALS | | | +| GOOGLE_GCR_CREDENTIALS | | | +| GOOGLE_PROD_ENDPOINTS_CREDENTIALS | | | +| PANTEL_SECRETS_FILE | | | +| PI_DEV_CLUSTER_CREDENTIALS | | | +| PI_PROD_CLUSTER_CREDENTIALS | | | +| PROD_PROJECT | | | +| STRIPE_API_KEY | | | diff --git a/.circleci/ci-docker-images/README.md b/.circleci/ci-docker-images/README.md new file mode 100644 index 000000000..69225d986 --- /dev/null +++ b/.circleci/ci-docker-images/README.md @@ -0,0 +1,6 @@ +# CI Docker Images + +This directory contains any custom images used for running the different CI jobs. + +- [Github Hub](hub/) +- [Python with Gcloud](python-gcloud/) \ No newline at end of file diff --git a/.circleci/ci-docker-images/hub/README.md b/.circleci/ci-docker-images/hub/README.md new file mode 100644 index 000000000..651db8cfe --- /dev/null +++ b/.circleci/ci-docker-images/hub/README.md @@ -0,0 +1,8 @@ +# Github Hub docker image + +- This docker image is pushed manually to Google Container Registry +- It is pushed as: eu.gcr.io/pi-ostelco-dev/github-hub: +- It is used for creating -and operating on- PRs. +- Hub requires an environment variable GITHUB_TOKEN to authenticate when creating a PR. +- Alternative authentication ways can be found on https://hub.github.com/hub.1.html +- For instructions on pushing the image, please refer to the [GCR official documentation](https://cloud.google.com/container-registry/docs/pushing-and-pulling) \ No newline at end of file diff --git a/.circleci/ci-docker-images/hub/dockerfile b/.circleci/ci-docker-images/hub/dockerfile new file mode 100644 index 000000000..d8c0b715b --- /dev/null +++ b/.circleci/ci-docker-images/hub/dockerfile @@ -0,0 +1,10 @@ +# This docker image is pushed manually to Google Container Registry +# It is pushed as: eu.gcr.io/pi-ostelco-dev/github-hub: +# It is used for creating -and operating on- PRs. +# Hub requires an environment variable GITHUB_TOKEN to authenticate when creating a PR. +# Alternative authentication ways can be found on https://hub.github.com/hub.1.html + +FROM fedora + +RUN dnf -yq install hub + diff --git a/.circleci/ci-docker-images/python-gcloud/README.md b/.circleci/ci-docker-images/python-gcloud/README.md new file mode 100644 index 000000000..5dd83a05a --- /dev/null +++ b/.circleci/ci-docker-images/python-gcloud/README.md @@ -0,0 +1,6 @@ +# Python-gcloud docker image + +- This docker image contains Python and Gcloud +- It is used for generating proto files for the swagger API spec and updating the Google endpoints. 
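Both CI image READMEs in this directory note that the images are pushed to Google Container Registry by hand and point at the GCR docs for the mechanics. A minimal sketch of that manual build-and-push flow, assuming gcloud is installed locally and the active account may push to the pi-ostelco-dev registry (the tag value is a placeholder):

```bash
# Sketch only: build one of the CI helper images locally and push it to GCR by hand.
# Assumes gcloud is installed and authorized for the pi-ostelco-dev project.
TAG=2.5.0                                    # placeholder tag
IMAGE=eu.gcr.io/pi-ostelco-dev/github-hub    # or eu.gcr.io/pi-ostelco-dev/python-gcloud

gcloud auth configure-docker --quiet         # allow docker to push to *.gcr.io
docker build -t "${IMAGE}:${TAG}" .          # run from the directory containing the dockerfile
docker push "${IMAGE}:${TAG}"
```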
+- It is pushed manually as: eu.gcr.io/pi-ostelco-dev/python-gcloud +- For instructions on pushing the image, please refer to the [GCR official documentation](https://cloud.google.com/container-registry/docs/pushing-and-pulling) \ No newline at end of file diff --git a/.circleci/ci-docker-images/python-gcloud/dockerfile b/.circleci/ci-docker-images/python-gcloud/dockerfile new file mode 100644 index 000000000..dda147ec2 --- /dev/null +++ b/.circleci/ci-docker-images/python-gcloud/dockerfile @@ -0,0 +1,10 @@ +# This docker image contains Python and Gcloud +# It is used for generating proto files for the swagger API spec and updating the Google endpoints. +# It is pushed manually as: eu.gcr.io/pi-ostelco-dev/python-gcloud +FROM python:3.6.6-jessie + +RUN pip install grpcio grpcio-tools && apt-get update && apt-get install lsb-release && \ + export CLOUD_SDK_REPO="cloud-sdk-$(lsb_release -c -s)" && \ + echo "deb http://packages.cloud.google.com/apt $CLOUD_SDK_REPO main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && \ + curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - && \ + apt-get update -y && apt-get install google-cloud-sdk -y \ No newline at end of file diff --git a/.circleci/config.yml b/.circleci/config.yml index 37ea8129a..bb33aa710 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,47 +1,358 @@ version: 2 jobs: - build: + ### JOBS FOR on-feature-branch-commit PIPELINE + build-test-repo: + # machine is needed to run Gradle build and to run docker compose tests + machine: + enabled: true - working_directory: ~/repo + steps: + - checkout + - run: + # checking for merge conflicts and merging locally if none exist + name: merging ${CIRCLE_BRANCH} into develop locally + command: | + git config --global user.email "${GIT_USER_EMAIL}" + git config --global user.name "${GIT_USER_NAME}" + git checkout ${CIRCLE_BRANCH} + git checkout develop + git merge ${CIRCLE_BRANCH} -m "Merging ${CIRCLE_BRANCH} into develop." + # Show the java version installed. + - run: java -version + + - run: + name: Pulling Gradle cache + command: | + sudo rm -f /etc/boto.cfg # needed to fix gsutil issue in circleci https://github.com/travis-ci/travis-ci/issues/7940 + export CLOUDSDK_CORE_PROJECT=${DEV_PROJECT} + echo $GOOGLE_GCR_CREDENTIALS > ${HOME}/gcloud-service-key.json + gcloud auth activate-service-account --key-file=${HOME}/gcloud-service-key.json + gsutil cp gs://pi-ostelco-core-gradle-cache/caches.tar.gz ~/caches.tar.gz + mkdir -p ~/.gradle/caches/ + tar -xzvf ~/caches.tar.gz -C ~/.gradle/caches/ . + # Copying pantel prod secret to locations where it is needed for docker compose tests. + - run: + name: Distribute pantel-prod.json secret from env var. + command: | + scripts/distribute-pantel-secrets.sh + # run gradle build. Skipping neo4j tests as they fail + - run: + name: Build entire repo + command: ./gradlew clean build -info -s -x :neo4j-store:test + + # persisting the entire project with its generated artifacts. They are needed in the build-image job below. + # the default working directory in circleci is ~/project/ + - persist_to_workspace: + root: ~/project/ + paths: + - . + # generating selfsigned certs. 
Needed for docker compose tests + - run: + name: Generate self signed certs + command: | + scripts/generate-selfsigned-ssl-certs.sh ocs.dev.ostelco.org + cp certs/ocs.dev.ostelco.org/nginx.crt ocsgw/config/ocs.crt + scripts/generate-selfsigned-ssl-certs.sh metrics.dev.ostelco.org + cp certs/metrics.dev.ostelco.org/nginx.crt ocsgw/config/metrics.crt + - run: + name: Acceptance Tests + command: docker-compose up --build --abort-on-container-exit + + - run: + name: notify slack on failure + when: on_fail + command: .circleci/notify-slack.sh on-feature-branch-commit false + + code-coverage: + environment: + CODACY_DOWNLOAD_URL: https://github.com/codacy/codacy-coverage-reporter/releases/download + CODACY_VERSION: 4.0.3 + CODACY_JAR_FILE: codacy-coverage-reporter-assembly-latest.jar + CODACY_MODULE: com.codacy.CodacyCoverageReporter + docker: - - image: circleci/openjdk:8u171-jdk-browsers + - image: circleci/openjdk:8u171-jdk steps: - - setup_remote_docker - - checkout - - restore_cache: - keys: - - gradle-cache - - run: - name: download codacy jar file - command: wget -O ~/codacy-coverage-reporter-assembly-latest.jar https://github.com/codacy/codacy-coverage-reporter/releases/download/4.0.1/codacy-coverage-reporter-4.0.1-assembly.jar - - run: - name: build entire repo (prime, ocsgw,.. etc) - command: ./gradlew clean build --parallel - -# deploy-prime: -# docker: -# - image: devth/helm:v2.9.1 -# working_directory: ~/repo -# steps: -# - run: -# name: running helm install -# command: | -# echo $GOOGLE_CREDENTIALS > $HOME/pantel-tests.json -# gcloud auth activate-service-account --key-file=$HOME/pantel-tests.json -# gcloud container clusters get-credentials my-cluster --zone europe-west2-c --project pantel-tests -# kubectl create serviceaccount -n kube-system tiller -# kubectl create clusterrolebinding tiller-binding --clusterrole=cluster-admin --serviceaccount kube-system:tiller -# helm init --service-account tiller -# helm repo add pantel-tests https://pantel-tests-charts.storage.googleapis.com/ -# helm install pantel-tests/prime --name prime --set firebaseServiceAccount=$(echo $FIREBASE_SERVICE_ACCOUNT | base64) + - run: + name: Download codacy + command: | + wget -O ~/${CODACY_JAR_FILE} \ + ${CODACY_DOWNLOAD_URL}/${CODACY_VERSION}/codacy-coverage-reporter-${CODACY_VERSION}-assembly.jar + - attach_workspace: + # Must be absolute path or relative path from working_directory + at: ~/project + + # the commands below need "CODACY_PROJECT_TOKEN" to be present as (circleci) ENV variable. + - run: + name: Generate Codacy code-coverage report + command: | + scripts/generate-codacy-coverage.sh + + - run: + name: notify slack on failure + when: on_fail + command: .circleci/notify-slack.sh on-feature-branch-commit false + + ### JOBS FOR on-PR-merge-to-dev PIPELINE + build-code: + machine: + enabled: true + + steps: + - checkout + - run: + name: Pulling Gradle cache + command: | + sudo rm -f /etc/boto.cfg # needed to fix gsutil issue in circleci https://github.com/travis-ci/travis-ci/issues/7940 + export CLOUDSDK_CORE_PROJECT=${DEV_PROJECT} + echo $GOOGLE_GCR_CREDENTIALS > ${HOME}/gcloud-service-key.json + gcloud auth activate-service-account --key-file=${HOME}/gcloud-service-key.json + gsutil cp gs://pi-ostelco-core-gradle-cache/caches.tar.gz ~/caches.tar.gz + mkdir -p ~/.gradle/caches/ + tar -xzvf ~/caches.tar.gz -C ~/.gradle/caches/ . 
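The cache handling above (and the matching "Push Gradle cache" step later in this job) is just a tar archive kept in a GCS bucket. A condensed sketch of the round trip, assuming the service account behind GOOGLE_GCR_CREDENTIALS can read and write the bucket named in the config:

```bash
# Sketch of the GCS-backed Gradle cache used by these jobs (bucket name taken from the config above).
BUCKET=gs://pi-ostelco-core-gradle-cache

# Pull: fetch and unpack the cache before building.
gsutil cp "${BUCKET}/caches.tar.gz" ~/caches.tar.gz
mkdir -p ~/.gradle/caches/
tar -xzf ~/caches.tar.gz -C ~/.gradle/caches/

# Push: drop lock files, repack and upload after the build.
rm -f  ~/.gradle/caches/modules-2/modules-2.lock
rm -fr ~/.gradle/caches/*/plugin-resolution/
tar -czf ~/caches.tar.gz -C ~/.gradle/caches .
gsutil cp ~/caches.tar.gz "${BUCKET}/caches.tar.gz"
```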
+ - run: + name: Gradle Build Prime + command: ./gradlew clean prime:build -info -s -x test -x integration + # - run: + # name: Gradle Build OCSGW + # command: | + # ./gradlew clean ocsgw:build -s + - run: + name: Push Gradle cache + command: | + rm -f ~/.gradle/caches/modules-2/modules-2.lock + rm -fr ~/.gradle/caches/*/plugin-resolution/ + tar -czvf ~/caches.tar.gz -C ~/.gradle/caches . + gsutil cp ~/caches.tar.gz gs://pi-ostelco-core-gradle-cache + + - persist_to_workspace: + root: ~/project/ + paths: + - . + + - run: + name: notify slack on failure + when: on_fail + command: .circleci/notify-slack.sh on-PR-merge-to-dev false + + build-image: + docker: + - image: google/cloud-sdk:latest + + working_directory: ~/project + + steps: + - run: + name: authenticate with gcloud + command: | + # $GOOGLE_GCR_CREDENTIALS env variable contains Google credentials + # with permission to write to Google container registry + echo $GOOGLE_GCR_CREDENTIALS > ${HOME}/gcloud-service-key.json + gcloud auth activate-service-account --key-file=${HOME}/gcloud-service-key.json + - attach_workspace: + # Must be absolute path or relative path from working_directory + at: ~/project + + # starts a remote docker environment to run docker commands + - setup_remote_docker + + - run: + name: build Prime docker image and push image to GCR + command: | + gcloud auth configure-docker --quiet + TAG=${CIRCLE_SHA1:0:9} + cd prime + docker build -t eu.gcr.io/pi-ostelco-dev/prime:$TAG . + docker push eu.gcr.io/pi-ostelco-dev/prime:$TAG + + # - run: + # name: build OCSGW docker image and push image to GCR + # command: | + # gcloud auth configure-docker --quiet + # TAG=${CIRCLE_SHA1:0:9} + # cd ocsgw + # docker build -t eu.gcr.io/pi-ostelco-dev/ocsgw:$TAG . + # docker push eu.gcr.io/pi-ostelco-dev/ocsgw:$TAG + - run: + name: notify slack on failure + when: on_fail + command: .circleci/notify-slack.sh on-PR-merge-to-dev false + + update-dev-endpoints: + docker: + - image: eu.gcr.io/pi-ostelco-dev/python-gcloud + steps: + - checkout + - run: + name: update endpoints spec + command: | + export CLOUDSDK_CORE_PROJECT=${DEV_PROJECT} + echo $GOOGLE_DEV_ENDPOINTS_CREDENTIALS > ${HOME}/gcloud-service-key.json + gcloud auth activate-service-account --key-file=${HOME}/gcloud-service-key.json + + python -m grpc_tools.protoc --include_imports --include_source_info --proto_path=ocs-grpc-api/src/main/proto --descriptor_set_out=ocs_descriptor.pb ocs.proto + python -m grpc_tools.protoc --include_imports --include_source_info --proto_path=analytics-grpc-api/src/main/proto --descriptor_set_out=metrics_descriptor.pb prime_metrics.proto + gcloud endpoints services deploy ocs_descriptor.pb prime/infra/new-dev/ocs-api.yaml + gcloud endpoints services deploy metrics_descriptor.pb prime/infra/new-dev/metrics-api.yaml + gcloud endpoints services deploy prime/infra/new-dev/prime-client-api.yaml + + - run: + name: notify slack on failure + when: on_fail + command: .circleci/notify-slack.sh on-PR-merge-to-dev false + + deploy-to-dev: + + working_directory: ~/project + + docker: + - image: praqma/gcloud-kubectl-helm:v2.8.1 + environment: + PROJECT: pi-ostelco-dev + CLUSTER: pi-dev + ZONE: europe-west1-c + SERVICE_ACCOUNT: terraform-dev-cluster@pi-ostelco-dev.iam.gserviceaccount.com + steps: + - checkout + + - run: + name: deploy prime to the dev cluster + command: | + export GOOGLE_CREDENTIALS=${PI_DEV_CLUSTER_CREDENTIALS} + /authenticate.bash + helm repo add ostelco https://storage.googleapis.com/pi-ostelco-helm-charts-repo/ + helm repo update + helm 
upgrade prime ostelco/prime --install --namespace dev \ + -f .circleci/prime-dev-values.yaml \ + --set prime.env.STRIPE_API_KEY=${STRIPE_API_KEY} \ + --set prime.tag=${CIRCLE_SHA1:0:9} \ + --set firebaseServiceAccount=${PANTEL_SECRETS_FILE} + + - run: + name: notify slack on failure + when: on_fail + command: .circleci/notify-slack.sh on-PR-merge-to-dev false + + create-PR-to-master: + working_directory: ~/project + + docker: + - image: eu.gcr.io/pi-ostelco-dev/github-hub:2.5.0 + + steps: + - checkout + - run: + name: create PR to merge develop into master + command: | + export PRIME_TAG=${CIRCLE_SHA1:0:9} + cd .circleci + ./substitute_prime_tag.sh + git config --global user.email "${GIT_USER_EMAIL}" + git config --global user.name "${GIT_USER_NAME}" + git add prime-prod-values.yaml + git commit -m "[ci skip] updating prime image tag to the latest built image." + git push https://${GITHUB_USER}:${GITHUB_TOKEN}@github.com/ostelco/ostelco-core.git develop + hub pull-request -m "merging develop into master" -b master + + - run: + name: notify slack on failure + when: on_fail + command: .circleci/notify-slack.sh on-PR-merge-to-dev false + + ### JOBS FOR on-PR-merge-to-master PIPELINE + update-prod-endpoints: + docker: + - image: eu.gcr.io/pi-ostelco-dev/python-gcloud + steps: + - checkout + - run: + name: update endpoints spec + command: | + export CLOUDSDK_CORE_PROJECT=${PROD_PROJECT} + echo $GOOGLE_PROD_ENDPOINTS_CREDENTIALS > ${HOME}/gcloud-service-key.json + gcloud auth activate-service-account --key-file=${HOME}/gcloud-service-key.json + + python -m grpc_tools.protoc --include_imports --include_source_info --proto_path=ocs-grpc-api/src/main/proto --descriptor_set_out=ocs_descriptor.pb ocs.proto + python -m grpc_tools.protoc --include_imports --include_source_info --proto_path=analytics-grpc-api/src/main/proto --descriptor_set_out=metrics_descriptor.pb prime_metrics.proto + gcloud endpoints services deploy ocs_descriptor.pb prime/infra/new-prod/ocs-api.yaml + gcloud endpoints services deploy metrics_descriptor.pb prime/infra/new-prod/metrics-api.yaml + gcloud endpoints services deploy prime/infra/new-prod/prime-client-api.yaml + + - run: + name: notify slack on failure + when: on_fail + command: .circleci/notify-slack.sh on-PR-merge-to-master false + + deploy-to-prod: + docker: + - image: praqma/gcloud-kubectl-helm:v2.8.1 + environment: + PROJECT: pi-ostelco-prod + CLUSTER: pi-prod + ZONE: europe-west1-c + SERVICE_ACCOUNT: terraform-manage-cluster-from@pi-ostelco-prod.iam.gserviceaccount.com + + steps: + - checkout + - run: + name: deploy prime to the prod cluster + command: | + export GOOGLE_CREDENTIALS=${PI_PROD_CLUSTER_CREDENTIALS} + #export TAG=$(git rev-parse --short=9 origin/circleci-dev) # fragile, gives latest develop commit but that may not be the correct tag! 
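The two update-*-endpoints jobs above deploy the generated proto descriptors together with the endpoints YAML. Once they have run, the live configuration can be inspected from any workstation holding the same credentials; a small sketch, where the service name is an assumption derived from the endpoint addresses used in the helm values:

```bash
# Sketch: inspect what an update-*-endpoints job actually deployed.
# Assumes gcloud is authenticated with the same endpoints service account,
# and that the Endpoints service name equals the endpoint address (an assumption).
export CLOUDSDK_CORE_PROJECT=${DEV_PROJECT}   # or ${PROD_PROJECT}
gcloud endpoints services list
gcloud endpoints configs list --service=ocs.new.dev.ostelco.org
```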
+ /authenticate.bash + helm repo add ostelco https://storage.googleapis.com/pi-ostelco-helm-charts-repo/ + helm repo update + helm upgrade prime ostelco/prime --version 0.3.1 --install --namespace prod \ + -f .circleci/prime-prod-values.yaml \ + --set prime.env.STRIPE_API_KEY=${STRIPE_API_KEY} \ + --set firebaseServiceAccount=${PANTEL_SECRETS_FILE} + + - run: + name: notify slack on failure + when: on_fail + command: .circleci/notify-slack.sh on-PR-merge-to-master false + workflows: version: 2 - build-and-deploy-prime: + on-feature-branch-commit: + jobs: + - build-test-repo: + filters: + branches: + only: /feature/.*/ + - code-coverage: + requires: + - build-test-repo + + on-PR-merge-to-dev: + jobs: + - build-code: + filters: + branches: + only: + - develop + - build-image: + requires: + - build-code + - update-dev-endpoints: + requires: + - build-image + - deploy-to-dev: + requires: + - update-dev-endpoints + - create-PR-to-master: + requires: + - deploy-to-dev + + deploy-to-prod: jobs: - - build: -# filters: -# branches: -# only: -# - master + - update-prod-endpoints: + filters: + branches: + only: + - master + - deploy-to-prod: + requires: + - update-prod-endpoints \ No newline at end of file diff --git a/.circleci/notify-slack.sh b/.circleci/notify-slack.sh new file mode 100755 index 000000000..0c6694dc5 --- /dev/null +++ b/.circleci/notify-slack.sh @@ -0,0 +1,40 @@ +#!/bin/bash + +# This script sends a custom message as a custom slack notification. +# It is used in the pipeline to notify on failures or successes. + +#### sanity check +if [ -z $1 ] || [ -z $2 ]; then + echo "ERROR: missing input parameters to send slack notifications. Aborting!" + exit 1 +fi +if [ -z ${SLACK_WEBHOOK} ]; then + echo "WARN: SLACK_WEBHOOK environemnt variable is not set. Slack notifications are skipped." + exit 0 +fi +#### + +#### input +app="PRIME" # app name +pipeline=$1 # pipeline name +success=$2 # true or false +#### + +if [ "$success" = true ] ; then + color="#36a64f" + button_style="primary" + status="succeeded" +else + color="#FF0000" + button_style="danger" + status="failed" +fi + +echo "sending slack notification ..." +curl -X POST -H 'Content-type: application/json' \ +--data '{"attachments": [{"fallback": "'${app}' Build result","color": "'${color}'" ,"pretext": "'${app}' '${pipeline}' pipeline:","title": "Job: ['${CIRCLE_JOB}'] for ('${app}') '${status}'. Check circleci for details.","actions": [{"type": "button","name": "check_workflow","text": "Check Workflow","url": "https://circleci.com/workflow-run/'${CIRCLE_WORKFLOW_ID}'","style": "'${button_style}'"},{"type": "button","name": "check_job","text": "Check Job","url": "https://circleci.com/gh/ostelco/ostelco-core/'${CIRCLE_BUILD_NUM}'","style": "'${button_style}'"}]}] }' ${SLACK_WEBHOOK} + +if [ $? != 0 ]; then + echo "Failed to send a slack notification!" + exit 1 +fi \ No newline at end of file diff --git a/.circleci/prime-dev-values.yaml b/.circleci/prime-dev-values.yaml new file mode 100644 index 000000000..af7ba5042 --- /dev/null +++ b/.circleci/prime-dev-values.yaml @@ -0,0 +1,127 @@ +# DEV values for prime. +# This is a YAML-formatted file. +# Declare variables to be passed into your templates. 
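The notify-slack.sh helper above takes a pipeline name and a success flag and reads the usual CircleCI variables. Outside CircleCI it can be exercised by hand, assuming a valid incoming-webhook URL (all values below are placeholders):

```bash
# Sketch: exercise the slack notifier locally (placeholder values; a real webhook URL is required).
export SLACK_WEBHOOK="https://hooks.slack.com/services/T000/B000/XXXX"   # placeholder
export CIRCLE_JOB="local-test" CIRCLE_WORKFLOW_ID="none" CIRCLE_BUILD_NUM="0"

.circleci/notify-slack.sh on-feature-branch-commit true    # report success
.circleci/notify-slack.sh on-feature-branch-commit false   # report failure
```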
+ +replicaCount: 1 +firebaseServiceAccount: "" + + +prime: + image: eu.gcr.io/pi-ostelco-dev/prime + tag: 2f47ab570 + pullPolicy: Always + env: + FIREBASE_ROOT_PATH: dev_new + NEO4J_HOST: neo4j-neo4j.neo4j.svc.cluster.local + STRIPE_API_KEY: "" + DATA_TRAFFIC_TOPIC: "data-traffic" + PURCHASE_INFO_TOPIC: "purchase-info" + ports: + - 8080 + - 8081 + - 8082 + - 8083 + resources: + limits: + cpu: 200m + memory: 350Mi + requests: + cpu: 100m + memory: 200Mi + livenessProbe: {} + # path: / + # port: 8081 + readinessProbe: {} + # path: / + # port: 8081 + annotations: + prometheus.io/scrape: 'true' + prometheus.io/path: '/prometheus-metrics' + prometheus.io/port: '8081' + +esp: + image: gcr.io/endpoints-release/endpoints-runtime + tag: 1 + pullPolicy: IfNotPresent + +ocsEsp: + enabled: true + env: {} + endpointAddress: ocs.new.dev.ostelco.org + ports: + - 9000 + - 8443 + + +apiEsp: + enabled: true + env: {} + endpointAddress: api.new.dev.ostelco.org + ports: + - 9002 + - 443 + +metricsEsp: + enabled: true + env: {} + endpointAddress: metrics.new.dev.ostelco.org + ports: + - 9004 + - 9443 + + +services: + prime: + name: prime-service + type: LoadBalancer + port: 443 + targetPort: 8443 + portName: grpc + # loadBalancerIP: x.y.z.n + api: + name: prime-api + type: LoadBalancer + port: 443 + targetPort: 443 + portName: https + # loadBalancerIP: x.y.z.n + metrics: + name: prime-metrics + type: LoadBalancer + port: 443 + targetPort: 9443 + portName: grpc + # loadBalancerIP: x.y.z.n + +ingress: + enabled: false + annotations: {} + # kubernetes.io/ingress.class: nginx + path: / + hosts: + - prime.local + tls: [] + # - secretName: chart-example-tls + # hosts: + # - chart-example.local + +certs: + enabled: true + dnsProvider: dev-clouddns + issuer: letsencrypt-production # or letsencrypt-staging + apiDns: + - api.new.dev.ostelco.org + ocsDns: + - ocs.new.dev.ostelco.org + metricsDns: + - metrics.new.dev.ostelco.org + +disruptionBudget: + enabled: false + minAvailable: 1 + +nodeSelector: {} + +tolerations: [] + +affinity: {} \ No newline at end of file diff --git a/.circleci/prime-prod-values-template.yaml b/.circleci/prime-prod-values-template.yaml new file mode 100644 index 000000000..1664a9e9c --- /dev/null +++ b/.circleci/prime-prod-values-template.yaml @@ -0,0 +1,126 @@ +# PROD values for prime. +# This is a YAML-formatted file. +# Declare variables to be passed into your templates. 
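The values files in this directory are what the deploy jobs feed to helm, so a rendered dry run is a cheap way to sanity-check an edit before a pipeline run. A sketch against the dev values, assuming helm 2 is initialised against a cluster (helm 2 still renders dry runs via tiller):

```bash
# Sketch: render the prime chart with the dev values without changing anything in the cluster.
helm repo add ostelco https://storage.googleapis.com/pi-ostelco-helm-charts-repo/
helm repo update
helm upgrade prime ostelco/prime --install --namespace dev \
    -f .circleci/prime-dev-values.yaml \
    --dry-run --debug
```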
+ +replicaCount: 1 +firebaseServiceAccount: "" + +prime: + image: eu.gcr.io/pi-ostelco-dev/prime + tag: ${PRIME_TAG} + pullPolicy: Always + env: + FIREBASE_ROOT_PATH: dev_new + NEO4J_HOST: neo4j-neo4j.neo4j.svc.cluster.local + STRIPE_API_KEY: "" + DATA_TRAFFIC_TOPIC: "data-traffic" + PURCHASE_INFO_TOPIC: "purchase-info" + ports: + - 8080 + - 8081 + - 8082 + - 8083 + resources: + limits: + cpu: 200m + memory: 350Mi + requests: + cpu: 100m + memory: 200Mi + livenessProbe: {} + # path: / + # port: 8081 + readinessProbe: {} + # path: / + # port: 8081 + annotations: + prometheus.io/scrape: 'true' + prometheus.io/path: '/prometheus-metrics' + prometheus.io/port: '8081' + +esp: + image: gcr.io/endpoints-release/endpoints-runtime + tag: 1 + pullPolicy: IfNotPresent + +ocsEsp: + enabled: true + env: {} + endpointAddress: prod-ocs.new.dev.ostelco.org + ports: + - 9000 + - 8443 + + +apiEsp: + enabled: true + env: {} + endpointAddress: prod-api.new.dev.ostelco.org + ports: + - 9002 + - 443 + +metricsEsp: + enabled: true + env: {} + endpointAddress: prod-metrics.new.dev.ostelco.org + ports: + - 9004 + - 9443 + + +services: + prime: + name: prime-service + type: LoadBalancer + port: 443 + targetPort: 8443 + portName: grpc + # loadBalancerIP: x.y.z.n + api: + name: prime-api + type: LoadBalancer + port: 443 + targetPort: 443 + portName: https + # loadBalancerIP: x.y.z.n + metrics: + name: prime-metrics + type: LoadBalancer + port: 443 + targetPort: 9443 + portName: grpc + # loadBalancerIP: x.y.z.n + +ingress: + enabled: false + annotations: {} + # kubernetes.io/ingress.class: nginx + path: / + hosts: + - prime.local + tls: [] + # - secretName: chart-example-tls + # hosts: + # - chart-example.local + +certs: + enabled: true + dnsProvider: dev-clouddns + issuer: letsencrypt-production # or letsencrypt-staging + apiDns: + - prod-api.new.dev.ostelco.org + ocsDns: + - prod-ocs.new.dev.ostelco.org + metricsDns: + - prod-metrics.new.dev.ostelco.org + +disruptionBudget: + enabled: false + minAvailable: 1 + +nodeSelector: {} + +tolerations: [] + +affinity: {} \ No newline at end of file diff --git a/.circleci/prime-prod-values.yaml b/.circleci/prime-prod-values.yaml new file mode 100644 index 000000000..12a906c71 --- /dev/null +++ b/.circleci/prime-prod-values.yaml @@ -0,0 +1,127 @@ +# PROD values for prime. +# This is a YAML-formatted file. +# Declare variables to be passed into your templates. 
+ +replicaCount: 1 +firebaseServiceAccount: "" + +prime: + image: eu.gcr.io/pi-ostelco-dev/prime + tag: 5990ea1d6 + pullPolicy: Always + env: + FIREBASE_ROOT_PATH: dev_new + NEO4J_HOST: neo4j-neo4j.neo4j.svc.cluster.local + STRIPE_API_KEY: "" + DATA_TRAFFIC_TOPIC: "data-traffic" + PURCHASE_INFO_TOPIC: "purchase-info" + + ports: + - 8080 + - 8081 + - 8082 + - 8083 + resources: + limits: + cpu: 200m + memory: 350Mi + requests: + cpu: 100m + memory: 200Mi + livenessProbe: {} + # path: / + # port: 8081 + readinessProbe: {} + # path: / + # port: 8081 + annotations: + prometheus.io/scrape: 'true' + prometheus.io/path: '/prometheus-metrics' + prometheus.io/port: '8081' + +esp: + image: gcr.io/endpoints-release/endpoints-runtime + tag: 1 + pullPolicy: IfNotPresent + +ocsEsp: + enabled: true + env: {} + endpointAddress: prod-ocs.new.dev.ostelco.org + ports: + - 9000 + - 8443 + + +apiEsp: + enabled: true + env: {} + endpointAddress: prod-api.new.dev.ostelco.org + ports: + - 9002 + - 443 + +metricsEsp: + enabled: true + env: {} + endpointAddress: prod-metrics.new.dev.ostelco.org + ports: + - 9004 + - 9443 + + +services: + prime: + name: prime-service + type: LoadBalancer + port: 443 + targetPort: 8443 + portName: grpc + # loadBalancerIP: x.y.z.n + api: + name: prime-api + type: LoadBalancer + port: 443 + targetPort: 443 + portName: https + # loadBalancerIP: x.y.z.n + metrics: + name: prime-metrics + type: LoadBalancer + port: 443 + targetPort: 9443 + portName: grpc + # loadBalancerIP: x.y.z.n + +ingress: + enabled: false + annotations: {} + # kubernetes.io/ingress.class: nginx + path: / + hosts: + - prime.local + tls: [] + # - secretName: chart-example-tls + # hosts: + # - chart-example.local + +certs: + enabled: true + dnsProvider: dev-clouddns + issuer: letsencrypt-production # or letsencrypt-staging + apiDns: + - prod-api.new.dev.ostelco.org + ocsDns: + - prod-ocs.new.dev.ostelco.org + metricsDns: + - prod-metrics.new.dev.ostelco.org + +disruptionBudget: + enabled: false + minAvailable: 1 + +nodeSelector: {} + +tolerations: [] + +affinity: {} \ No newline at end of file diff --git a/.circleci/substitute_prime_tag.sh b/.circleci/substitute_prime_tag.sh new file mode 100755 index 000000000..b211b6733 --- /dev/null +++ b/.circleci/substitute_prime_tag.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +# This script is used to inject an image tag for PRIME taken from the PRIME_TAG environment variable +# into the prime-prod-values.yaml (the values file used for production deployment of the PRIME helm chart). +# The script is used in the pipeline after new PRIME image is created and before making a PR from develop into master. + +# This script should NOT be used to inject secrets into the values file as this file is version controlled in git. + +rm -f prime-prod-values.yaml temp.yml +( echo "cat <prime-prod-values.yaml"; + cat prime-prod-values-template.yaml; + echo "EOF"; +) >temp.yml +. 
temp.yml \ No newline at end of file diff --git a/.gitignore b/.gitignore index 538ab953a..1902c8e88 100644 --- a/.gitignore +++ b/.gitignore @@ -27,6 +27,7 @@ hs_err_pid* .gradle build out +bin .idea *.iml diff --git a/.travis.yml b/.travis.yml index 1734281fd..c8ca266a8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -11,7 +11,7 @@ cache: install: echo "skip 'gradle assemble' step" # TODO vihang: fix neo4j-store:test -script: ./gradlew clean build -info --stacktrace -x neo4j-store:test +script: ./gradlew clean build -info --stacktrace -x neo4j-store:test -x integration before_cache: - rm -f $HOME/.gradle/caches/modules-2/modules-2.lock @@ -21,6 +21,6 @@ after_success: # The curl command is not always working. Kept original command in comment incase codacy updates #- sudo apt-get install jq #- wget -O ~/codacy-coverage-reporter-assembly-latest.jar $(curl https://api.github.com/repos/codacy/codacy-coverage-reporter/releases/latest | jq -r .assets[0].browser_download_url) - - wget -O ~/codacy-coverage-reporter-assembly-latest.jar https://github.com/codacy/codacy-coverage-reporter/releases/download/4.0.2/codacy-coverage-reporter-4.0.2-assembly.jar + - wget -O ~/codacy-coverage-reporter-assembly-latest.jar https://github.com/codacy/codacy-coverage-reporter/releases/download/4.0.3/codacy-coverage-reporter-4.0.3-assembly.jar - for REPORT_TARGET in $(find . -name jacocoTestReport.xml | sort); do java -cp ~/codacy-coverage-reporter-assembly-latest.jar com.codacy.CodacyCoverageReporter report -l Java -r ${REPORT_TARGET} --partial; done - java -cp ~/codacy-coverage-reporter-assembly-latest.jar com.codacy.CodacyCoverageReporter final diff --git a/README.md b/README.md index ae502309c..8f5acb09d 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ -[![Kotlin version badge](https://img.shields.io/badge/kotlin-1.2.61-blue.svg)](http://kotlinlang.org/) +[![Kotlin version badge](https://img.shields.io/badge/kotlin-1.2.70-blue.svg)](http://kotlinlang.org/) [![Prime version](https://img.shields.io/github/tag/ostelco/ostelco-core.svg)](https://github.com/ostelco/ostelco-core/tags) [![GitHub license](https://img.shields.io/github/license/ostelco/ostelco-core.svg)](https://github.com/ostelco/ostelco-core/blob/master/LICENSE) @@ -8,6 +8,8 @@ [![Codacy Badge](https://api.codacy.com/project/badge/Grade/d15007ecfc2942f7901673177e147d09)](https://www.codacy.com/app/vihang.patil/ostelco-core?utm_source=github.com&utm_medium=referral&utm_content=ostelco/ostelco-core&utm_campaign=Badge_Grade) [![Codacy Badge](https://api.codacy.com/project/badge/Coverage/d15007ecfc2942f7901673177e147d09)](https://www.codacy.com/app/vihang.patil/ostelco-core?utm_source=github.com&utm_medium=referral&utm_content=ostelco/ostelco-core&utm_campaign=Badge_Coverage) +[![CircleCI](https://circleci.com/gh/ostelco/ostelco-core/tree/develop.svg?style=svg&circle-token=f6bf824c3910dcf1551bf3b2776715dabfc7dc41)](https://circleci.com/gh/ostelco/ostelco-core/tree/develop) + # ostelco-core Mono Repository for core protocols and services around a OCS/BSS for packet data. For each service please see the individual Readme.md files. 
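The substitute_prime_tag.sh script further up renders prime-prod-values-template.yaml into prime-prod-values.yaml by expanding ${PRIME_TAG} through the shell. An equivalent, slightly more explicit sketch using envsubst — not what the pipeline runs, just an illustration of the same substitution:

```bash
# Sketch: expand ${PRIME_TAG} in the prod values template, equivalent in effect to
# .circleci/substitute_prime_tag.sh (which does the expansion via a shell heredoc).
# envsubst ships with GNU gettext; listing '${PRIME_TAG}' restricts substitution to that
# one variable so other dollar signs in the template are left untouched.
export PRIME_TAG=${CIRCLE_SHA1:0:9}   # nine-character image tag, as in the pipeline (CIRCLE_SHA1 must be set)
cd .circleci
envsubst '${PRIME_TAG}' < prime-prod-values-template.yaml > prime-prod-values.yaml
```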
diff --git a/acceptance-tests/Dockerfile b/acceptance-tests/Dockerfile index f897cb74a..35e8792f4 100644 --- a/acceptance-tests/Dockerfile +++ b/acceptance-tests/Dockerfile @@ -1,4 +1,4 @@ -FROM openjdk:8u171 +FROM azul/zulu-openjdk:8u181-8.31.0.1 MAINTAINER CSI "csi@telenordigital.com" diff --git a/acceptance-tests/build.gradle b/acceptance-tests/build.gradle index 312e6a21b..476060dc8 100644 --- a/acceptance-tests/build.gradle +++ b/acceptance-tests/build.gradle @@ -1,5 +1,5 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "application" id "com.github.johnrengelman.shadow" version "2.0.4" } @@ -14,8 +14,6 @@ dependencies { implementation project(":prime-client-api") implementation project(':diameter-test') - implementation 'com.google.firebase:firebase-admin:6.4.0' - implementation "com.stripe:stripe-java:$stripeVersion" implementation 'io.jsonwebtoken:jjwt:0.9.1' // tests fail when updated to 2.27 @@ -23,6 +21,8 @@ dependencies { implementation "org.jetbrains.kotlin:kotlin-test:$kotlinVersion" implementation "org.jetbrains.kotlin:kotlin-test-junit:$kotlinVersion" + + implementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" } shadowJar { diff --git a/acceptance-tests/script/wait.sh b/acceptance-tests/script/wait.sh old mode 100644 new mode 100755 diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/admin/ImporterTest.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/admin/ImporterTest.kt new file mode 100644 index 000000000..ee1144b71 --- /dev/null +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/admin/ImporterTest.kt @@ -0,0 +1,21 @@ +package org.ostelco.at.admin + +import io.dropwizard.testing.FixtureHelpers.fixture +import org.junit.Test +import org.ostelco.at.jersey.post + + +class GetSubscriptions { + + @Test + fun `jersey test - POST import of sample-offer-products-segments`() { + + val theBody = fixture("sample-offer-products-segments.yaml") + + post { + path = "/imports" + body = theBody + headerParams = mapOf("Content-Type" to listOf("text/vnd.yaml")) + } + } +} diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/common/Firebase.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/common/Firebase.kt deleted file mode 100644 index 35052c378..000000000 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/common/Firebase.kt +++ /dev/null @@ -1,41 +0,0 @@ -package org.ostelco.at.common - -import com.google.auth.oauth2.GoogleCredentials -import com.google.firebase.FirebaseApp -import com.google.firebase.FirebaseOptions -import com.google.firebase.database.FirebaseDatabase -import java.io.FileInputStream -import java.nio.file.Files -import java.nio.file.Paths - -object Firebase { - - private fun setupFirebaseInstance(): FirebaseDatabase { - - try { - FirebaseApp.getInstance() - } catch (e: Exception) { - val databaseName = "pantel-2decb" - val configFile = System.getenv("GOOGLE_APPLICATION_CREDENTIALS") ?: "config/pantel-prod.json" - - val credentials: GoogleCredentials = if (Files.exists(Paths.get(configFile))) { - FileInputStream(configFile).use { serviceAccount -> GoogleCredentials.fromStream(serviceAccount) } - } else { - throw Exception() - } - - val options = FirebaseOptions.Builder() - .setCredentials(credentials) - .setDatabaseUrl("https://$databaseName.firebaseio.com/") - .build() - - FirebaseApp.initializeApp(options) - } - - return FirebaseDatabase.getInstance() - } - - fun deleteAllPaymentCustomers() { - 
setupFirebaseInstance().getReference("test/paymentId").removeValueAsync().get() - } -} \ No newline at end of file diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/common/LoggerDelegate.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/common/LoggerDelegate.kt index e73aae6f1..e17c914aa 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/common/LoggerDelegate.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/common/LoggerDelegate.kt @@ -4,10 +4,10 @@ import org.slf4j.Logger import org.slf4j.LoggerFactory /** - * This is a function to which the member variable of type {@link org.slf4j.Logger} is delegated to be instantiated. - * The syntax to do so is `private val logger by logger()`. - * This function will then return the logger for calling class. + * This is a function to which the member variable of type [org.slf4j.Logger] is delegated to be instantiated. + * The syntax to do so is private val logger by getLogger(). + * This function will then return the [org.slf4j.Logger] for calling class. */ -fun R.logger(): Lazy = lazy { +fun R.getLogger(): Lazy = lazy { LoggerFactory.getLogger(this.javaClass) } diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt index 55351aa4d..5e933ac13 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt @@ -28,8 +28,20 @@ object StripePayment { // https://stripe.com/docs/api/java#create_source Stripe.apiKey = System.getenv("STRIPE_API_KEY") - // TODO martin: set valid map values - val sourceMap = mapOf() + val sourceMap = mapOf( + "type" to "card", + "card" to mapOf( + "number" to "4242424242424242", + "exp_month" to 8, + "exp_year" to 2019, + "cvc" to "314"), + "owner" to mapOf( + "address" to mapOf( + "city" to "Oslo", + "country" to "Norway" + ), + "email" to "me@somewhere.com") + ) val source = Source.create(sourceMap) return source.id } @@ -43,6 +55,15 @@ object StripePayment { return token.card.id } + fun getCardIdForSourceId(sourceId: String) : String { + + // https://stripe.com/docs/api/java#create_source + Stripe.apiKey = System.getenv("STRIPE_API_KEY") + + val source = Source.retrieve(sourceId) + return source.id + } + /** * Obtains 'default source' directly from Stripe. Use in tests to * verify that the correspondng 'setDefaultSource' API works as diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/HttpClientUtil.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/HttpClientUtil.kt index 85717799e..4426da7e1 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/HttpClientUtil.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/HttpClientUtil.kt @@ -7,6 +7,7 @@ import org.ostelco.at.common.url import javax.ws.rs.client.Entity import javax.ws.rs.core.GenericType import javax.ws.rs.core.MediaType +import javax.ws.rs.core.MultivaluedHashMap import kotlin.test.assertEquals /** @@ -14,6 +15,7 @@ import kotlin.test.assertEquals */ class HttpRequest { lateinit var path: String + var headerParams: Map> = emptyMap() var queryParams: Map = emptyMap() var body: Any? 
= null var subscriberId = "foo@bar.com" @@ -24,7 +26,7 @@ class HttpRequest { */ inline fun get(execute: HttpRequest.() -> Unit): T { val request = HttpRequest().apply(execute) - val response = HttpClient.send(request.path, request.queryParams, request.subscriberId).get() + val response = HttpClient.send(request.path, request.queryParams, request.headerParams, request.subscriberId).get() assertEquals(200, response.status) { response.readEntity(String::class.java) } return response.readEntity(object : GenericType() {}) } @@ -34,18 +36,19 @@ inline fun get(execute: HttpRequest.() -> Unit): T { */ inline fun post(execute: HttpRequest.() -> Unit): T { val request = HttpRequest().apply(execute) - val response = HttpClient.send(request.path, request.queryParams, request.subscriberId) + val response = HttpClient.send(request.path, request.queryParams, request.headerParams, request.subscriberId) .post(Entity.entity(request.body ?: "", MediaType.APPLICATION_JSON_TYPE)) assertEquals(201, response.status) { response.readEntity(String::class.java) } return response.readEntity(object : GenericType() {}) } + /** * DSL function for PUT operation */ inline fun put(execute: HttpRequest.() -> Unit): T { val request = HttpRequest().apply(execute) - val response = HttpClient.send(request.path, request.queryParams, request.subscriberId) + val response = HttpClient.send(request.path, request.queryParams, request.headerParams, request.subscriberId) .put(Entity.entity(request.body ?: "", MediaType.APPLICATION_JSON_TYPE)) assertEquals(200, response.status) { response.readEntity(String::class.java) } return response.readEntity(object : GenericType() {}) @@ -67,13 +70,19 @@ object HttpClient { private val jerseyClient = JerseyClientBuilder.createClient() - private fun setup(path: String, queryParams: Map, url: String, subscriberId: String): JerseyInvocation.Builder { + private fun setup( + path: String, + queryParams: Map, + headerParams: Map>, + url: String, subscriberId: String): JerseyInvocation.Builder { + var target = jerseyClient.target(url).path(path) queryParams.forEach { target = target.queryParam(it.key, it.value) } return target.request(MediaType.APPLICATION_JSON_TYPE) + .headers(MultivaluedHashMap().apply { this.putAll(headerParams) }) .header("Authorization", "Bearer ${generateAccessToken(subscriberId)}") } - fun send(path: String, queryParams: Map, subscriberId: String): JerseyInvocation.Builder = - setup(path, queryParams, url, subscriberId) + fun send(path: String, queryParams: Map, headerParams: Map>, subscriberId: String): JerseyInvocation.Builder = + setup(path, queryParams, headerParams, url, subscriberId) } diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt index 12b6a676c..8ada341c8 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt @@ -1,15 +1,15 @@ package org.ostelco.at.jersey import org.junit.Test -import org.ostelco.at.common.Firebase import org.ostelco.at.common.StripePayment import org.ostelco.at.common.createProfile import org.ostelco.at.common.createSubscription import org.ostelco.at.common.expectedProducts -import org.ostelco.at.common.logger +import org.ostelco.at.common.getLogger import org.ostelco.at.common.randomInt import org.ostelco.prime.client.model.ActivePseudonyms import org.ostelco.prime.client.model.ApplicationToken +import org.ostelco.prime.client.model.Bundle import 
org.ostelco.prime.client.model.Consent import org.ostelco.prime.client.model.PaymentSource import org.ostelco.prime.client.model.PaymentSourceList @@ -105,7 +105,7 @@ class ProfileTest { } @Test - fun `jersey test - GET application token`() { + fun `jersey test - POST application token`() { val email = "token-${randomInt()}@test.com" createProfile("Test Token User", email) @@ -153,7 +153,7 @@ class GetSubscriptions { class GetSubscriptionStatusTest { - private val logger by logger() + private val logger by getLogger() @Test fun `jersey test - GET subscription status`() { @@ -186,7 +186,7 @@ class GetSubscriptionStatusTest { class GetPseudonymsTest { - private val logger by logger() + private val logger by getLogger() @Test fun `jersey test - GET active pseudonyms`() { @@ -232,7 +232,6 @@ class SourceTest { fun `jersey test - POST source create`() { StripePayment.deleteAllCustomers() - Firebase.deleteAllPaymentCustomers() val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Payment Source", email = email) @@ -259,56 +258,74 @@ class SourceTest { } @Test - fun `okhttp test - GET list sources`() { + fun `jersey test - GET list sources`() { StripePayment.deleteAllCustomers() - Firebase.deleteAllPaymentCustomers() val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Payment Source", email = email) - val tokenId = StripePayment.createPaymentTokenId() - val cardId = StripePayment.getCardIdForTokenId(tokenId) + Thread.sleep(200) - // Ties source with user profile both local and with Stripe - post { + val createdIds = listOf(createTokenWithStripe(email), + createSourceWithStripe(email), + createTokenWithStripe(email), + createSourceWithStripe(email)) + + val sources : PaymentSourceList = get { path = "/paymentSources" subscriberId = email - queryParams = mapOf("sourceId" to tokenId) } - Thread.sleep(200) + val ids = createdIds.map { getIdFromStripe(it) } - val newTokenId = StripePayment.createPaymentTokenId() - val newCardId = StripePayment.getCardIdForTokenId(newTokenId) + assert(sources.isNotEmpty()) { "Expected at least one payment source for profile $email" } + assert(sources.map{ it.id }.containsAll(ids)) + { "Expected to find all of $ids in list of sources for profile $email" } + + sources.forEach { + assert(it.id.isNotEmpty()) { "Expected 'id' to be set in source account details for profile $email" } + assert(arrayOf("card", "source").contains(it.type)) { + "Unexpected source account type ${it.type} for profile $email" + } + } + } + + private fun getIdFromStripe(tokenId : String) : String { + if (tokenId.startsWith("src_")) { + return StripePayment.getCardIdForSourceId(tokenId) + } + return StripePayment.getCardIdForTokenId(tokenId) + } + + private fun createTokenWithStripe(email: String) : String { + val tokenId = StripePayment.createPaymentTokenId() post { path = "/paymentSources" subscriberId = email - queryParams = mapOf("sourceId" to newTokenId) + queryParams = mapOf("sourceId" to tokenId) } - val sources : PaymentSourceList = get { + return tokenId + } + + private fun createSourceWithStripe(email: String) : String { + val sourceId = StripePayment.createPaymentSourceId() + + post { path = "/paymentSources" subscriberId = email + queryParams = mapOf("sourceId" to sourceId) } - assert(sources.isNotEmpty()) { "Expected at least one payment source for profile $email" } - assert(sources.map{ it.id }.containsAll(listOf(cardId, newCardId))) - { "Expected to find both $cardId and $newCardId in list of sources for profile $email" } - - sources.forEach { 
- assert(it.details.id.isNotEmpty()) { "Expected 'id' to be set in source account details for profile $email" } - assertEquals("card", it.details.accountType, - "Unexpected source account type ${it.details.accountType} for profile $email") - } + return sourceId } @Test fun `jersey test - PUT source set default`() { StripePayment.deleteAllCustomers() - Firebase.deleteAllPaymentCustomers() val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Payment Source", email = email) @@ -359,16 +376,14 @@ class PurchaseTest { fun `jersey test - POST products purchase`() { StripePayment.deleteAllCustomers() - Firebase.deleteAllPaymentCustomers() val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Purchase User", email = email) - val subscriptionStatusBefore: SubscriptionStatus = get { - path = "/subscription/status" + val balanceBefore = get> { + path = "/bundles" subscriberId = email - } - val balanceBefore = subscriptionStatusBefore.remaining + }.first().balance val productSku = "1GB_249NOK" val sourceId = StripePayment.createPaymentTokenId() @@ -381,11 +396,10 @@ class PurchaseTest { Thread.sleep(100) // wait for 100 ms for balance to be updated in db - val subscriptionStatusAfter: SubscriptionStatus = get { - path = "/subscription/status" + val balanceAfter = get> { + path = "/bundles" subscriberId = email - } - val balanceAfter = subscriptionStatusAfter.remaining + }.first().balance assertEquals(1_000_000_000, balanceAfter - balanceBefore, "Balance did not increased by 1GB after Purchase") @@ -404,7 +418,57 @@ class PurchaseTest { fun `jersey test - POST products purchase using default source`() { StripePayment.deleteAllCustomers() - Firebase.deleteAllPaymentCustomers() + + val email = "purchase-${randomInt()}@test.com" + createProfile(name = "Test Purchase User with Default Payment Source", email = email) + + val sourceId = StripePayment.createPaymentTokenId() + + val paymentSource: PaymentSource = post { + path = "/paymentSources" + subscriberId = email + queryParams = mapOf("sourceId" to sourceId) + } + + assertNotNull(paymentSource.id, message = "Failed to create payment source") + + val balanceBefore = get> { + path = "/bundles" + subscriberId = email + }.first().balance + + val productSku = "1GB_249NOK" + + post { + path = "/products/$productSku/purchase" + subscriberId = email + } + + Thread.sleep(100) // wait for 100 ms for balance to be updated in db + + val balanceAfter = get> { + path = "/bundles" + subscriberId = email + }.first().balance + + assertEquals(1_000_000_000, balanceAfter - balanceBefore, "Balance did not increased by 1GB after Purchase") + + val purchaseRecords: PurchaseRecordList = get { + path = "/purchases" + subscriberId = email + } + + purchaseRecords.sortBy { it.timestamp } + + assert(Instant.now().toEpochMilli() - purchaseRecords.last().timestamp < 10_000) { "Missing Purchase Record" } + assertEquals(expectedProducts().first(), purchaseRecords.last().product, "Incorrect 'Product' in purchase record") + } + + + @Test + fun `jersey test - POST products purchase add source then pay with it`() { + + StripePayment.deleteAllCustomers() val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Purchase User with Default Payment Source", email = email) @@ -430,6 +494,7 @@ class PurchaseTest { post { path = "/products/$productSku/purchase" subscriberId = email + queryParams = mapOf("sourceId" to paymentSource.id) } Thread.sleep(100) // wait for 100 ms for balance to be updated in db @@ -453,17 +518,18 @@ class 
PurchaseTest { assertEquals(expectedProducts().first(), purchaseRecords.last().product, "Incorrect 'Product' in purchase record") } + + @Test fun `jersey test - POST products purchase without payment`() { val email = "purchase-legacy-${randomInt()}@test.com" createProfile(name = "Test Legacy Purchase User", email = email) - val subscriptionStatusBefore: SubscriptionStatus = get { - path = "/subscription/status" + val balanceBefore = get> { + path = "/bundles" subscriberId = email - } - val balanceBefore = subscriptionStatusBefore.remaining + }.first().balance val productSku = "1GB_249NOK" @@ -474,11 +540,10 @@ class PurchaseTest { Thread.sleep(100) // wait for 100 ms for balance to be updated in db - val subscriptionStatusAfter: SubscriptionStatus = get { - path = "/subscription/status" + val balanceAfter = get> { + path = "/bundles" subscriberId = email - } - val balanceAfter = subscriptionStatusAfter.remaining + }.first().balance assertEquals(1_000_000_000, balanceAfter - balanceBefore, "Balance did not increased by 1GB after Purchase") diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt index 7a9e99ed3..715353b6c 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt @@ -1,14 +1,14 @@ package org.ostelco.at.okhttp import org.junit.Test -import org.ostelco.at.common.Firebase import org.ostelco.at.common.StripePayment import org.ostelco.at.common.createProfile import org.ostelco.at.common.createSubscription import org.ostelco.at.common.expectedProducts -import org.ostelco.at.common.logger +import org.ostelco.at.common.getLogger import org.ostelco.at.common.randomInt import org.ostelco.at.okhttp.ClientFactory.clientForSubject +import org.ostelco.prime.client.api.DefaultApi import org.ostelco.prime.client.model.ApplicationToken import org.ostelco.prime.client.model.Consent import org.ostelco.prime.client.model.PaymentSource @@ -128,7 +128,7 @@ class GetSubscriptions { class GetSubscriptionStatusTest { - private val logger by logger() + private val logger by getLogger() @Test fun `okhttp test - GET subscription status`() { @@ -160,7 +160,7 @@ class GetSubscriptionStatusTest { class GetPseudonymsTest { - private val logger by logger() + private val logger by getLogger() @Test fun `okhttp test - GET active pseudonyms`() { @@ -204,7 +204,6 @@ class SourceTest { fun `okhttp test - POST source create`() { StripePayment.deleteAllCustomers() - Firebase.deleteAllPaymentCustomers() val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Payment Source", email = email) @@ -230,44 +229,62 @@ class SourceTest { fun `okhttp test - GET list sources`() { StripePayment.deleteAllCustomers() - Firebase.deleteAllPaymentCustomers() val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Payment Source", email = email) val client = clientForSubject(subject = email) - val tokenId = StripePayment.createPaymentTokenId() - val cardId = StripePayment.getCardIdForTokenId(tokenId) - - // Ties source with user profile both local and with Stripe - client.createSource(tokenId) - Thread.sleep(200) - val newTokenId = StripePayment.createPaymentTokenId() - val newCardId = StripePayment.getCardIdForTokenId(newTokenId) - - client.createSource(newTokenId) + val createdIds = listOf(createTokenWithStripe(client), + createSourceWithStripe(client), + createTokenWithStripe(client), + createSourceWithStripe(client)) 
val sources = client.listSources() + val ids = createdIds.map { getIdFromStripe(it) } + assert(sources.isNotEmpty()) { "Expected at least one payment source for profile $email" } - assert(sources.map{ it.id }.containsAll(listOf(cardId, newCardId))) - { "Expected to find both $cardId and $newCardId in list of sources for profile $email" } + assert(sources.map{ it.id }.containsAll(ids)) + { "Expected to find all of $ids in list of sources for profile $email" } sources.forEach { - assert(it.details.id.isNotEmpty()) { "Expected 'id' to be set in source account details for profile $email" } - assertEquals("card", it.details.accountType, - "Unexpected source account type ${it.details.accountType} for profile $email") + assert(it.id.isNotEmpty()) { "Expected 'id' to be set in source account details for profile $email" } + assert(arrayOf("card", "source").contains(it.type)) { + "Unexpected source account type ${it.type} for profile $email" + } + } + } + + private fun getIdFromStripe(tokenId : String) : String { + if (tokenId.startsWith("src_")) { + return StripePayment.getCardIdForSourceId(tokenId) } + return StripePayment.getCardIdForTokenId(tokenId) + } + + private fun createTokenWithStripe(client : DefaultApi) : String { + val tokenId = StripePayment.createPaymentTokenId() + + client.createSource(tokenId) + + return tokenId + } + + private fun createSourceWithStripe(client : DefaultApi) : String { + val sourceId = StripePayment.createPaymentSourceId() + + client.createSource(sourceId) + + return sourceId } @Test fun `okhttp test - PUT source set default`() { StripePayment.deleteAllCustomers() - Firebase.deleteAllPaymentCustomers() val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Payment Source", email = email) @@ -308,14 +325,13 @@ class PurchaseTest { fun `okhttp test - POST products purchase`() { StripePayment.deleteAllCustomers() - Firebase.deleteAllPaymentCustomers() val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Purchase User", email = email) val client = clientForSubject(subject = email) - val balanceBefore = client.subscriptionStatus.remaining + val balanceBefore = client.bundles.first().balance val sourceId = StripePayment.createPaymentTokenId() @@ -323,7 +339,7 @@ class PurchaseTest { Thread.sleep(200) // wait for 200 ms for balance to be updated in db - val balanceAfter = client.subscriptionStatus.remaining + val balanceAfter = client.bundles.first().balance assertEquals(1_000_000_000, balanceAfter - balanceBefore, "Balance did not increased by 1GB after Purchase") @@ -339,7 +355,6 @@ class PurchaseTest { fun `okhttp test - POST products purchase using default source`() { StripePayment.deleteAllCustomers() - Firebase.deleteAllPaymentCustomers() val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Purchase User with Default Payment Source", email = email) @@ -352,7 +367,7 @@ class PurchaseTest { assertNotNull(paymentSource.id, message = "Failed to create payment source") - val balanceBefore = client.subscriptionStatus.remaining + val balanceBefore = client.bundles.first().balance val productSku = "1GB_249NOK" @@ -360,6 +375,42 @@ class PurchaseTest { Thread.sleep(200) // wait for 200 ms for balance to be updated in db + val balanceAfter = client.bundles.first().balance + + assertEquals(1_000_000_000, balanceAfter - balanceBefore, "Balance did not increased by 1GB after Purchase") + + val purchaseRecords = client.purchaseHistory + + purchaseRecords.sortBy { it.timestamp } + + assert(Instant.now().toEpochMilli() - 
purchaseRecords.last().timestamp < 10_000) { "Missing Purchase Record" } + assertEquals(expectedProducts().first(), purchaseRecords.last().product, "Incorrect 'Product' in purchase record") + } + + @Test + fun `okhttp test - POST products purchase add source then pay with it`() { + + StripePayment.deleteAllCustomers() + + val email = "purchase-${randomInt()}@test.com" + createProfile(name = "Test Purchase User with Default Payment Source", email = email) + + val sourceId = StripePayment.createPaymentTokenId() + + val client = clientForSubject(subject = email) + + val paymentSource: PaymentSource = client.createSource(sourceId) + + assertNotNull(paymentSource.id, message = "Failed to create payment source") + + val balanceBefore = client.subscriptionStatus.remaining + + val productSku = "1GB_249NOK" + + client.purchaseProduct(productSku, paymentSource.id, null) + + Thread.sleep(200) // wait for 200 ms for balance to be updated in db + val balanceAfter = client.subscriptionStatus.remaining assertEquals(1_000_000_000, balanceAfter - balanceBefore, "Balance did not increased by 1GB after Purchase") @@ -380,13 +431,13 @@ class PurchaseTest { val client = clientForSubject(subject = email) - val balanceBefore = client.subscriptionStatus.remaining + val balanceBefore = client.bundles.first().balance client.buyProductDeprecated("1GB_249NOK") Thread.sleep(200) // wait for 200 ms for balance to be updated in db - val balanceAfter = client.subscriptionStatus.remaining + val balanceAfter = client.bundles.first().balance assertEquals(1_000_000_000, balanceAfter - balanceBefore, "Balance did not increased by 1GB after Purchase") diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/pgw/OcsTest.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/pgw/OcsTest.kt index 5e21982ac..44af16e46 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/pgw/OcsTest.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/pgw/OcsTest.kt @@ -8,13 +8,13 @@ import org.junit.BeforeClass import org.junit.Test import org.ostelco.at.common.createProfile import org.ostelco.at.common.createSubscription -import org.ostelco.at.common.logger +import org.ostelco.at.common.getLogger import org.ostelco.at.common.randomInt import org.ostelco.at.jersey.get import org.ostelco.diameter.model.RequestType import org.ostelco.diameter.test.TestClient import org.ostelco.diameter.test.TestHelper -import org.ostelco.prime.client.model.SubscriptionStatus +import org.ostelco.prime.client.model.Bundle import java.lang.Thread.sleep import kotlin.test.assertEquals import kotlin.test.fail @@ -27,7 +27,7 @@ import kotlin.test.fail */ class OcsTest { - private val logger by logger() + private val logger by getLogger() private var testClient: TestClient? 
= null @@ -102,11 +102,10 @@ class OcsTest { private fun getBalance(): Long { sleep(200) // wait for 200 ms for balance to be updated in db - val subscriptionStatus: SubscriptionStatus = get { - path = "/subscription/status" + return get> { + path = "/bundles" subscriberId = EMAIL - } - return subscriptionStatus.remaining + }.first().balance } @Test diff --git a/acceptance-tests/src/main/resources/sample-offer-products-segments.yaml b/acceptance-tests/src/main/resources/sample-offer-products-segments.yaml new file mode 100644 index 000000000..b1a6e2809 --- /dev/null +++ b/acceptance-tests/src/main/resources/sample-offer-products-segments.yaml @@ -0,0 +1,35 @@ +producingAgent: + name: Simple agent + version: 1.0 + +offer: + id: test-offer + + # list of existing products + # listing products to be created in this yaml is OPTIONAL + +# products: +# - 1GB_249NOK + + # list of existing segments + # listing segments to be created in this yaml is OPTIONAL + +# segments: +# - test-segment + +# These products will be created and linked to offer - 'test-offer' +products: + - sku: 1GB_249NOK + price: + amount: 249 + currency: NOK + properties: + noOfBytes: 1_000_000_000 + presentation: + isDefault: true + offerLabel: Default Offer + priceLabel: 249 NOK + +# These segments will be created and linked to offer - 'test-offer' +segments: + - id: test-segment diff --git a/admin-api/README.md b/admin-api/README.md index cdc42d917..6ad15a478 100644 --- a/admin-api/README.md +++ b/admin-api/README.md @@ -1,4 +1,4 @@ -# Admin API +# Module Admin API * Admin API is for back-office admin management. * Each of the _Resources_ are listed below shall have Create-Read-Update-Delete operations. @@ -47,4 +47,14 @@ validity (visibility): start_date: end_date: - \ No newline at end of file + + +# Importer API + +Documentation: TBD + +To test towards the docker-compose instance, do: + + curl -X POST -H "Content-Type: text/vnd.yaml" --data-binary @admin-api/src/test/resources/sample-offer-products-segments.yaml http://localhost:9090/importer + +(_very_ important to use the --data-binary option or else linebreaks will be broken, and that will mess up the input) \ No newline at end of file diff --git a/admin-api/build.gradle b/admin-api/build.gradle index 5cf370b1c..75e6f1408 100644 --- a/admin-api/build.gradle +++ b/admin-api/build.gradle @@ -1,9 +1,13 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "java-library" } dependencies { - implementation project(":prime-api") + implementation project(":prime-modules") + implementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" + testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" -} \ No newline at end of file +} + +apply from: '../jacoco.gradle' \ No newline at end of file diff --git a/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/AdminModule.kt b/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/AdminModule.kt index d96ea3e2b..ef913f837 100644 --- a/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/AdminModule.kt +++ b/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/AdminModule.kt @@ -2,10 +2,11 @@ package org.ostelco.prime.admin.api import com.fasterxml.jackson.annotation.JsonTypeName import io.dropwizard.setup.Environment +import org.ostelco.prime.admin.importer.ImportAdapter import org.ostelco.prime.module.PrimeModule @JsonTypeName("admin") -class AdminModule: PrimeModule { +class AdminModule : PrimeModule { override fun init(env: 
Environment) { val jerseySever = env.jersey() @@ -14,5 +15,7 @@ class AdminModule: PrimeModule { jerseySever.register(SegmentResource()) jerseySever.register(ProductResource()) jerseySever.register(ProductClassResource()) + jerseySever.register(YamlMessageBodyReader::class.java) + jerseySever.register(ImporterResource(ImportAdapter())) } } diff --git a/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImporterResource.kt b/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImporterResource.kt new file mode 100644 index 000000000..3f5c81c0b --- /dev/null +++ b/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImporterResource.kt @@ -0,0 +1,87 @@ +package org.ostelco.prime.admin.api + +import com.fasterxml.jackson.core.JsonProcessingException +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory +import com.fasterxml.jackson.module.kotlin.registerKotlinModule +import org.ostelco.prime.admin.importer.ImportDeclaration +import org.ostelco.prime.admin.importer.ImportProcessor +import org.ostelco.prime.getLogger +import java.io.InputStream +import java.lang.reflect.Type +import javax.ws.rs.Consumes +import javax.ws.rs.POST +import javax.ws.rs.Path +import javax.ws.rs.WebApplicationException +import javax.ws.rs.core.MediaType +import javax.ws.rs.core.MultivaluedMap +import javax.ws.rs.core.Response +import javax.ws.rs.core.Response.Status.BAD_REQUEST +import javax.ws.rs.ext.MessageBodyReader + + +/** + * Resource used to handle the importer related REST calls. + */ +@Path("/importer") +class ImporterResource(val processor: ImportProcessor) { + + private val logger by getLogger() + + @POST + @Consumes("text/vnd.yaml") + fun postStatus(declaration: ImportDeclaration): Response { + logger.info("POST status for importer") + + return processor.import(declaration).fold( + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, + { Response.status(Response.Status.CREATED) } + ).build() + } +} + +/// XXX This is a very generic message body reader, should +// be available anywhere we read yaml files. +@Consumes("text/vnd.yaml") +class YamlMessageBodyReader : MessageBodyReader { + + private val logger by getLogger() + + override fun isReadable( + type: Class<*>, + genericType: Type, + annotations: Array, + mediaType: MediaType): Boolean = true + + override fun readFrom( + type: Class, + genericType: Type, + annotations: Array, mediaType: MediaType, + httpHeaders: MultivaluedMap, + inputStream: InputStream): Any { + + try { + val mapper = ObjectMapper(YAMLFactory()).registerKotlinModule() + return mapper.readValue(inputStream, type) + } catch (e: Exception) { + logger.error("Failed to parse yaml: ${e.message}") + throw WebApplicationException(e.message, BAD_REQUEST.statusCode) + } + } +} + +/** + * Common 'helper' functions for resources. 
+ * + */ +val objectMapper = ObjectMapper() + +fun R.asJson(`object`: Any): String { + try { + return objectMapper.writeValueAsString(`object`) + } catch (e: JsonProcessingException) { + val logger by getLogger() + logger.error("Error in json response {}", e) + } + return "" +} \ No newline at end of file diff --git a/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/ImportProcessor.kt b/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/ImportProcessor.kt new file mode 100644 index 000000000..3f1c653ee --- /dev/null +++ b/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/ImportProcessor.kt @@ -0,0 +1,26 @@ +package org.ostelco.prime.admin.importer + +import arrow.core.Either +import org.ostelco.prime.apierror.ApiError +import org.ostelco.prime.apierror.ApiErrorCode +import org.ostelco.prime.apierror.BadRequestError +import org.ostelco.prime.module.getResource +import org.ostelco.prime.storage.AdminDataSource + +interface ImportProcessor { + fun import(importDeclaration: ImportDeclaration): Either +} + +class ImportAdapter : ImportProcessor { + + private val adminDataStore by lazy { getResource() } + + override fun import(importDeclaration: ImportDeclaration): Either { + + return adminDataStore.atomicImport( + offer = importDeclaration.offer, + products = importDeclaration.products, + segments = importDeclaration.segments) + .mapLeft { BadRequestError(it.message, ApiErrorCode.FAILED_TO_IMPORT_OFFER) } + } +} \ No newline at end of file diff --git a/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/Model.kt b/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/Model.kt new file mode 100644 index 000000000..e0be4c00f --- /dev/null +++ b/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/Model.kt @@ -0,0 +1,68 @@ +package org.ostelco.prime.admin.importer + +import org.ostelco.prime.model.Offer +import org.ostelco.prime.model.Product +import org.ostelco.prime.model.Segment + +/** + * The input classes being parsed (as yaml). + */ + +data class ProducingAgent(val name: String, val version: String) + +class ImportDeclaration( + val producingAgent: ProducingAgent, + val offer: Offer, + val segments: Collection = emptyList(), + val products: Collection = emptyList()) + +/* +class TimeInterval(var from: String?= null, var to: String? = null) + +class Presentation( + var badgeLabel: String? = null, + var description: String? = null, + var shortDescription: String? = null, + var label: String? = null, + var name: String? = null, + var priceLabel: String? = null, + var hidden: Boolean? = null, + var imageUrl: String? = null +) + +class OfferFinancials( + var repurchability: String? = null, + var currencyLabel: String? = null, + var price: Int? = null, + var taxRate: BigDecimal? = null +) + +class SubscriberIdCollection( + var decryptionKey: String? = null, + var members : MutableList? = null +) + + +class Segment( + var type: String? = null, + var description: String? = null, + var members: SubscriberIdCollection? = null +) + +// XXX Should perhaps, apart from SKU, be a +// a keyword/value map, to be interpreted by +// something, somewhere that knows something about +// technical product parameters? +class Product( + var sku: String? = null, + var noOfBytes: BigInteger? = null +) + + +class Offer( + var visibility: TimeInterval? = null, + var presentation: Presentation? = null, + var financial: OfferFinancials? = null, + var product: Product? 
= null +) +*/ \ No newline at end of file diff --git a/admin-api/src/test/kotlin/org/ostelco/importer/ImporterResourceTest.kt b/admin-api/src/test/kotlin/org/ostelco/importer/ImporterResourceTest.kt new file mode 100644 index 000000000..0b6473fe2 --- /dev/null +++ b/admin-api/src/test/kotlin/org/ostelco/importer/ImporterResourceTest.kt @@ -0,0 +1,133 @@ +package org.ostelco.importer + +import arrow.core.Either +import io.dropwizard.testing.FixtureHelpers.fixture +import io.dropwizard.testing.junit.ResourceTestRule +import org.junit.Assert.assertEquals +import org.junit.ClassRule +import org.junit.Test +import org.ostelco.prime.admin.api.ImporterResource +import org.ostelco.prime.admin.api.YamlMessageBodyReader +import org.ostelco.prime.admin.importer.ImportDeclaration +import org.ostelco.prime.admin.importer.ImportProcessor +import org.ostelco.prime.apierror.ApiError +import org.ostelco.prime.model.Price +import javax.ws.rs.client.Entity +import javax.ws.rs.core.Response.Status + + +/** + * Class for unit testing ImporterResource. + */ +class ImporterResourceTest { + + companion object { + + lateinit var importedResource: ImportDeclaration + + private val processor: ImportProcessor = object : ImportProcessor { + override fun import(importDeclaration: ImportDeclaration): Either { + importedResource = importDeclaration + return Either.right(Unit) + } + } + + @ClassRule + @JvmField + val resources: ResourceTestRule? = ResourceTestRule.builder() + .addResource(ImporterResource(processor)) + .addProvider(YamlMessageBodyReader::class.java) + .build() + } + + @Test + fun `test creating offer with products and segments`() { + + val text: String = fixture("sample-offer-products-segments.yaml") + + val response = resources + ?.target("/importer") + ?.request("text/vnd.yaml") + ?.post(Entity.entity(text, "text/vnd.yaml")) + + assertEquals(response?.readEntity(String::class.java), Status.CREATED.statusCode, response?.status) + assertEquals("Simple agent", importedResource.producingAgent.name) + assertEquals("1.0", importedResource.producingAgent.version) + + // check offer + assertEquals("test-offer", importedResource.offer.id) + assertEquals(emptyList(), importedResource.offer.products) + assertEquals(emptyList(), importedResource.offer.segments) + + // check product + assertEquals(1, importedResource.products.size) + val product = importedResource.products.first() + assertEquals("1GB_249NOK", product.sku) + assertEquals(Price(249, "NOK"), product.price) + assertEquals(mapOf("noOfBytes" to "1_000_000_000"), product.properties) + assertEquals( + mapOf("isDefault" to "true", + "offerLabel" to "Default Offer", + "priceLabel" to "249 NOK"), + product.presentation) + + // check segment + assertEquals(1, importedResource.segments.size) + val segment = importedResource.segments.first() + assertEquals("test-segment", segment.id) + assertEquals(emptyList(), segment.subscribers) + } + + @Test + fun `test creating offer using existing products and segments`() { + + val text: String = fixture("sample-offer-only.yaml") + + val response = resources + ?.target("/importer") + ?.request("text/vnd.yaml") + ?.post(Entity.entity(text, "text/vnd.yaml")) + + assertEquals(response?.readEntity(String::class.java), Status.CREATED.statusCode, response?.status) + assertEquals("Simple agent", importedResource.producingAgent.name) + assertEquals("1.0", importedResource.producingAgent.version) + + // check offer + assertEquals("test-offer", importedResource.offer.id) + assertEquals(listOf("1GB_249NOK"), 
importedResource.offer.products) + assertEquals(listOf("test-segment"), importedResource.offer.segments) + + // check product + assertEquals(0, importedResource.products.size) + + // check segment + assertEquals(0, importedResource.segments.size) + } + + /** + * Testing reading a yaml file. + */ + /* + @Test + fun `test creating offer with products and segments`() { + + val text: String = + this::class.java.classLoader.getResource("sample-offer-legacy.yaml").readText(Charsets.UTF_8) + + val response = resources + ?.target("/importer") + ?.request("text/vnd.yaml") + ?.post(Entity.entity(text, "text/vnd.yaml")) + + assertEquals(Status.OK.statusCode, response?.status) + assertEquals("Simple agent", importedResource?.producingAgent?.name) + assertEquals("1.0", importedResource?.producingAgent?.version) + assertEquals("2018-02-22T12:41:49.871Z", importedResource?.offer?.visibility?.from) + assertEquals("2018-02-22T12:41:49.871Z", importedResource?.offer?.visibility?.to) + + // Missing tests for presentation, financials, product within offer, and everything within segment. + + System.out.println("members = " + importedResource?.segment?.members?.members) + } + */ +} \ No newline at end of file diff --git a/admin-api/src/test/resources/sample-offer-legacy.yaml b/admin-api/src/test/resources/sample-offer-legacy.yaml new file mode 100644 index 000000000..2a6f38ebe --- /dev/null +++ b/admin-api/src/test/resources/sample-offer-legacy.yaml @@ -0,0 +1,77 @@ +# +# This is a sample YAML format to be used by +# agents that produce offers. The general idea +# is that an offer has a set of parameters, +# and also a set of selected subscribers that will +# get it. +# +# YAML was chosen since it's more human readable than +# e.g. json or protobuffers, while still being +# easy to produce by an agent, and relatively compact, +# in particular when gzipped. +# + +producingAgent: + name: Simple agent + version: 1.0 + +# # All of the parameters below are just copied from the firebase +# # realtime database we used in the demo, converted to +# # camel case. All the fields should be documented +# # in this document, and we should think through if this is +# # the best set of parameters we want. + +offer: + # XXX This offer does not have an ID, but if we were just + # updating the list of members of the segment, it would + # make sense to have an OfferID, or something that refers + # to a previously created offer. That id should be created + # by the importer, and used by the agent when updating + # membership. If any other parameters are going to be + # changed, it is necessary to produce a new offer. + # It may make sense to put the ID in the url when + # we update (PUT method) changes, but then the + # ID in the yaml will be redundant. Figure out how to + # do this one way or another and just do it. + visibility: + from: "2018-02-22T12:41:49.871Z" + to: "2018-02-22T12:41:49.871Z" + presentation: + badgeLabel: "mbop" + description: "Best offer you will get today" + shortDescription: "Best offer!" + label: "3 GB" + name: "3 GB" + priceLabel: "49 NOK" + hidden: false + imageUrl: https://www.ft-associates.com/wp-content/uploads/2015/08/Best-Offer.jpg + financial: + repurchability: 1 + currencyLabel: "NOK" + price: 4900 + taxRate: 10.0 + product: + sku: 2 + # A possibly very long list of product parameters that are all + # dependent on the SKU's requirement. Details ignored here, + # that may pop up later. Deal with them then. 
+ noOfBytes: 3000000000 + +# # We put the segment last, since it may have a long list of +# # members in it. We want that list to be last, since it contains +# # little information that humans are interested in, and we want +# # humans to start reading the file at the top. + +segment: + type: agent-specific-segment + description: "This is how this segment should be described" + members: + # The decryption key is what the de-anonymizer will use to + # make proper identifiers out of the members listed below. + # The special purpose key "none" indicatest that the member list + # is in clear text. + decryptionKey: none + members: + - 4790300157 + - 4790300144 + - 4333333333 diff --git a/admin-api/src/test/resources/sample-offer-only.yaml b/admin-api/src/test/resources/sample-offer-only.yaml new file mode 100644 index 000000000..5e11702f8 --- /dev/null +++ b/admin-api/src/test/resources/sample-offer-only.yaml @@ -0,0 +1,12 @@ +producingAgent: + name: Simple agent + version: 1.0 + +offer: + id: test-offer + # use existing product + products: + - 1GB_249NOK + # use existing segment + segments: + - test-segment diff --git a/admin-api/src/test/resources/sample-offer-products-segments.yaml b/admin-api/src/test/resources/sample-offer-products-segments.yaml new file mode 100644 index 000000000..b1a6e2809 --- /dev/null +++ b/admin-api/src/test/resources/sample-offer-products-segments.yaml @@ -0,0 +1,35 @@ +producingAgent: + name: Simple agent + version: 1.0 + +offer: + id: test-offer + + # list of existing products + # listing products to be created in this yaml is OPTIONAL + +# products: +# - 1GB_249NOK + + # list of existing segments + # listing segments to be created in this yaml is OPTIONAL + +# segments: +# - test-segment + +# These products will be created and linked to offer - 'test-offer' +products: + - sku: 1GB_249NOK + price: + amount: 249 + currency: NOK + properties: + noOfBytes: 1_000_000_000 + presentation: + isDefault: true + offerLabel: Default Offer + priceLabel: 249 NOK + +# These segments will be created and linked to offer - 'test-offer' +segments: + - id: test-segment diff --git a/analytics-grpc-api/README.md b/analytics-grpc-api/README.md index ab92604dc..6663d5b8f 100644 --- a/analytics-grpc-api/README.md +++ b/analytics-grpc-api/README.md @@ -1 +1 @@ -# Analytics API +# Module Analytics API diff --git a/analytics-grpc-api/build.gradle b/analytics-grpc-api/build.gradle index 308b1b22f..1a37a301e 100644 --- a/analytics-grpc-api/build.gradle +++ b/analytics-grpc-api/build.gradle @@ -4,10 +4,6 @@ plugins { id "idea" } -// Keeping it version 1.14.0 to be consistent with grpc via PubSub client lib -// Keeping it version 1.14.0 to be consistent with netty via Firebase lib -ext.grpcVersion = "1.14.0" - dependencies { api "io.grpc:grpc-netty-shaded:$grpcVersion" api "io.grpc:grpc-protobuf:$grpcVersion" diff --git a/analytics-grpc-api/src/main/proto/analytics.proto b/analytics-grpc-api/src/main/proto/analytics.proto index f2d68aa83..ab057ce8b 100644 --- a/analytics-grpc-api/src/main/proto/analytics.proto +++ b/analytics-grpc-api/src/main/proto/analytics.proto @@ -14,10 +14,14 @@ message DataTrafficInfo { uint64 bucketBytes = 2; uint64 bundleBytes = 3; google.protobuf.Timestamp timestamp = 4; + string apn = 5; + string mccMnc = 6; } message AggregatedDataTrafficInfo { string msisdn = 1; uint64 dataBytes = 2; google.protobuf.Timestamp timestamp = 3; + string apn = 4; + string mccMnc = 5; } \ No newline at end of file diff --git a/analytics-module/build.gradle b/analytics-module/build.gradle 
index 74bea1607..fea4ea378 100644 --- a/analytics-module/build.gradle +++ b/analytics-module/build.gradle @@ -1,17 +1,17 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "java-library" } dependencies { - implementation project(":prime-api") + implementation project(":prime-modules") implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion" implementation "io.dropwizard:dropwizard-core:$dropwizardVersion" implementation "com.google.cloud:google-cloud-pubsub:$googleCloudVersion" implementation 'com.google.code.gson:gson:2.8.5' - //compile group: 'com.google.api', name: 'gax-grpc', version: '0.14.0' - testCompile group: 'com.google.api', name: 'gax-grpc', version: '1.30.0' + + testImplementation 'com.google.api:gax-grpc:1.32.0' testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" testImplementation "org.mockito:mockito-core:$mockitoVersion" diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsGrpcServer.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsGrpcServer.kt index 8a2515f06..10c30752d 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsGrpcServer.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsGrpcServer.kt @@ -4,7 +4,7 @@ import io.dropwizard.lifecycle.Managed import io.grpc.BindableService import io.grpc.Server import io.grpc.ServerBuilder -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger import java.io.IOException /** @@ -15,7 +15,7 @@ import java.io.IOException */ class AnalyticsGrpcServer(private val port: Int, service: BindableService) : Managed { - private val logger by logger() + private val logger by getLogger() // may add Transport Security with Certificates if needed. // may add executor for control over number of threads diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsGrpcService.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsGrpcService.kt index 9cf9b7535..40de21abb 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsGrpcService.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsGrpcService.kt @@ -3,7 +3,7 @@ package org.ostelco.prime.analytics import io.grpc.stub.StreamObserver import org.ostelco.prime.analytics.PrimeMetric.ACTIVE_SESSIONS import org.ostelco.prime.analytics.metrics.CustomMetricsRegistry -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger import org.ostelco.prime.metrics.api.OcsgwAnalyticsReply import org.ostelco.prime.metrics.api.OcsgwAnalyticsReport import org.ostelco.prime.metrics.api.OcsgwAnalyticsServiceGrpc @@ -24,7 +24,7 @@ import java.util.* class AnalyticsGrpcService : OcsgwAnalyticsServiceGrpc.OcsgwAnalyticsServiceImplBase() { - private val logger by logger() + private val logger by getLogger() /** * Handles the OcsgwAnalyticsEvent message. @@ -34,11 +34,6 @@ class AnalyticsGrpcService : OcsgwAnalyticsServiceGrpc.OcsgwAnalyticsServiceImpl return StreamObserverForStreamWithId(streamId) } - /** - * Return an unique ID based on Java's UUID generator that uniquely - * identifies a stream of values. - * @return A new unique identifier. 
- */ private fun newUniqueStreamId(): String { return UUID.randomUUID().toString() } diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsServiceImpl.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsServiceImpl.kt index 7dafff765..453b77c5f 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsServiceImpl.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsServiceImpl.kt @@ -3,16 +3,16 @@ package org.ostelco.prime.analytics import org.ostelco.prime.analytics.metrics.CustomMetricsRegistry import org.ostelco.prime.analytics.publishers.DataConsumptionInfoPublisher import org.ostelco.prime.analytics.publishers.PurchaseInfoPublisher -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger import org.ostelco.prime.model.PurchaseRecord class AnalyticsServiceImpl : AnalyticsService { - private val logger by logger() + private val logger by getLogger() - override fun reportTrafficInfo(msisdn: String, usedBytes: Long, bundleBytes: Long) { - logger.info("reportTrafficInfo : msisdn {} usedBytes {} bundleBytes {}", msisdn, usedBytes, bundleBytes) - DataConsumptionInfoPublisher.publish(msisdn, usedBytes, bundleBytes) + override fun reportTrafficInfo(msisdn: String, usedBytes: Long, bundleBytes: Long, apn: String?, mccMnc: String?) { + logger.info("reportTrafficInfo : msisdn {} usedBytes {} bundleBytes {} apn {} mccMnc {}", msisdn, usedBytes, bundleBytes, apn, mccMnc) + DataConsumptionInfoPublisher.publish(msisdn, usedBytes, bundleBytes, apn, mccMnc) } override fun reportMetric(primeMetric: PrimeMetric, value: Long) { diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/metrics/CustomMetricsRegistry.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/metrics/CustomMetricsRegistry.kt index 0a1ae391f..abe683e10 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/metrics/CustomMetricsRegistry.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/metrics/CustomMetricsRegistry.kt @@ -58,16 +58,12 @@ object CustomMetricsRegistry { } } - /** - * Register counter with value from counterMap - */ + // Register counter with value from counterMap private fun registerCounter(primeMetric: PrimeMetric) { registry.register(primeMetric.metricName, counterMap[primeMetric]) } - /** - * Register gauge with value from gaugeValueMap as its source - */ + // Register gauge with value from gaugeValueMap as its source private fun registerGauge(primeMetric: PrimeMetric) { registry.register(primeMetric.metricName, Gauge { gaugeValueMap[primeMetric] }) } diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt index 0ea3850af..954f764b9 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt @@ -7,7 +7,7 @@ import com.google.protobuf.util.Timestamps import com.google.pubsub.v1.PubsubMessage import org.ostelco.analytics.api.DataTrafficInfo import org.ostelco.prime.analytics.ConfigRegistry -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger import org.ostelco.prime.module.getResource import org.ostelco.prime.pseudonymizer.PseudonymizerService import java.time.Instant @@ -18,11 +18,11 @@ import 
java.time.Instant object DataConsumptionInfoPublisher : PubSubPublisher by DelegatePubSubPublisher(topicId = ConfigRegistry.config.dataTrafficTopicId) { - private val logger by logger() + private val logger by getLogger() private val pseudonymizerService by lazy { getResource() } - fun publish(msisdn: String, usedBucketBytes: Long, bundleBytes: Long) { + fun publish(msisdn: String, usedBucketBytes: Long, bundleBytes: Long, apn: String?, mccMnc: String?) { if (usedBucketBytes == 0L) { return @@ -36,6 +36,8 @@ object DataConsumptionInfoPublisher : .setBucketBytes(usedBucketBytes) .setBundleBytes(bundleBytes) .setTimestamp(Timestamps.fromMillis(now)) + .setApn(apn) + .setMccMnc(mccMnc) .build() .toByteString() diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DelegatePubSubPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DelegatePubSubPublisher.kt index 7db538c90..39ce4592c 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DelegatePubSubPublisher.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DelegatePubSubPublisher.kt @@ -30,10 +30,9 @@ class DelegatePubSubPublisher( val channel = ManagedChannelBuilder.forTarget(strSocketAddress).usePlaintext().build() // Create a publisher instance with default settings bound to the topic val channelProvider = FixedTransportChannelProvider.create(GrpcTransportChannel.create(channel)) - val credentialsProvider = NoCredentialsProvider() Publisher.newBuilder(topicName) .setChannelProvider(channelProvider) - .setCredentialsProvider(credentialsProvider) + .setCredentialsProvider(NoCredentialsProvider()) .build(); } else { Publisher.newBuilder(topicName).build() diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt index ac8abe10a..8913ab360 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt @@ -12,7 +12,7 @@ import com.google.gson.reflect.TypeToken import com.google.protobuf.ByteString import com.google.pubsub.v1.PubsubMessage import org.ostelco.prime.analytics.ConfigRegistry -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger import org.ostelco.prime.model.PurchaseRecord import org.ostelco.prime.model.PurchaseRecordInfo import org.ostelco.prime.module.getResource @@ -26,7 +26,7 @@ import java.net.URLEncoder object PurchaseInfoPublisher : PubSubPublisher by DelegatePubSubPublisher(topicId = ConfigRegistry.config.purchaseInfoTopicId) { - private val logger by logger() + private val logger by getLogger() private val pseudonymizerService by lazy { getResource() } @@ -36,7 +36,9 @@ object PurchaseInfoPublisher : val builder = GsonBuilder() // Type for this conversion is explicitly set to java.util.Map // This is needed because of kotlin's own Map interface + @Suppress("PLATFORM_CLASS_MAPPED_TO_KOTLIN") val mapType = object : TypeToken>() {}.type + @Suppress("PLATFORM_CLASS_MAPPED_TO_KOTLIN") val serializer = JsonSerializer> { src, _, _ -> val array = JsonArray() src.forEach { k, v -> diff --git a/app-notifier/build.gradle b/app-notifier/build.gradle index 4f8a2ff67..94b971821 100644 --- a/app-notifier/build.gradle +++ b/app-notifier/build.gradle @@ -1,12 +1,11 @@ plugins { - id 
"org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "java-library" } dependencies { - implementation project(":prime-api") - // Keep it to 6.4.0 to match netty via ocs-api - implementation 'com.google.firebase:firebase-admin:6.4.0' + implementation project(":prime-modules") + implementation project(":firebase-extensions") testImplementation "org.jetbrains.kotlin:kotlin-test:$kotlinVersion" testImplementation "org.jetbrains.kotlin:kotlin-test-junit:$kotlinVersion" diff --git a/app-notifier/src/main/kotlin/org/ostelco/prime/appnotifier/FirebaseModule.kt b/app-notifier/src/main/kotlin/org/ostelco/prime/appnotifier/FirebaseModule.kt index 4ddb234c5..f4ba95b01 100644 --- a/app-notifier/src/main/kotlin/org/ostelco/prime/appnotifier/FirebaseModule.kt +++ b/app-notifier/src/main/kotlin/org/ostelco/prime/appnotifier/FirebaseModule.kt @@ -2,38 +2,25 @@ package org.ostelco.prime.appnotifier import com.fasterxml.jackson.annotation.JsonProperty import com.fasterxml.jackson.annotation.JsonTypeName -import com.google.auth.oauth2.GoogleCredentials import com.google.firebase.FirebaseApp import com.google.firebase.FirebaseOptions -import org.hibernate.validator.constraints.NotEmpty +import org.ostelco.common.firebasex.usingCredentialsFile import org.ostelco.prime.module.PrimeModule -import java.io.FileInputStream import java.io.IOException -import java.nio.file.Files -import java.nio.file.Paths @JsonTypeName("firebase-app-notifier") class FirebaseModule : PrimeModule { @JsonProperty("config") fun setConfig(config: FirebaseConfig) { - setupFirebaseApp(config.databaseName, config.configFile) + setupFirebaseApp(config.configFile) } - private fun setupFirebaseApp( - databaseName: String, - configFile: String) { + private fun setupFirebaseApp(configFile: String) { try { - val credentials: GoogleCredentials = if (Files.exists(Paths.get(configFile))) { - FileInputStream(configFile).use { serviceAccount -> GoogleCredentials.fromStream(serviceAccount) } - } else { - GoogleCredentials.getApplicationDefault() - } - val options = FirebaseOptions.Builder() - .setCredentials(credentials) - .setDatabaseUrl("https://$databaseName.firebaseio.com/") + .usingCredentialsFile(configFile) .build() try { FirebaseApp.getInstance("fcm") @@ -50,13 +37,4 @@ class FirebaseModule : PrimeModule { } } -class FirebaseConfig { - - @NotEmpty - @JsonProperty("databaseName") - lateinit var databaseName: String - - @NotEmpty - @JsonProperty("configFile") - lateinit var configFile: String -} \ No newline at end of file +data class FirebaseConfig(val configFile: String) \ No newline at end of file diff --git a/auth-server/Dockerfile b/auth-server/Dockerfile index 8e0964cfa..50a1964b9 100644 --- a/auth-server/Dockerfile +++ b/auth-server/Dockerfile @@ -1,4 +1,4 @@ -FROM openjdk:8u171 +FROM azul/zulu-openjdk:8u181-8.31.0.1 MAINTAINER CSI "csi@telenordigital.com" diff --git a/auth-server/README.md b/auth-server/README.md index 9057959c0..35812cefb 100644 --- a/auth-server/README.md +++ b/auth-server/README.md @@ -1,8 +1,10 @@ -# Auth Server - +# Module Auth Server +```http request Path: /auth/token Port: 8080 Method: GET -Header: X-MSISDN - Will be header injected by P-GW. -Response: JWT Firebase Token for given MSISDN. \ No newline at end of file +Header: X-MSISDN - Will be header injected by P-GW. +``` + + Response: JWT Firebase Token for given MSISDN. 
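As a rough illustration (not part of this patch), a client could exercise the endpoint described above like this — assuming the auth-server is running locally on port 8080 and the `X-MSISDN` header is set by hand (outside of tests it is injected by the P-GW); the function name `fetchFirebaseToken` and the sample MSISDN are made up for the example:

```kotlin
import java.net.HttpURLConnection
import java.net.URL

// Minimal sketch: fetch a Firebase custom token for the given MSISDN from a
// locally running auth-server. Host and port are assumptions for local testing.
fun fetchFirebaseToken(msisdn: String): String {
    val connection = URL("http://localhost:8080/auth/token").openConnection() as HttpURLConnection
    connection.requestMethod = "GET"
    // Normally injected by the P-GW; set manually here for testing.
    connection.setRequestProperty("X-MSISDN", msisdn)
    return connection.inputStream.bufferedReader().use { it.readText() }
}

fun main() {
    println(fetchFirebaseToken("4790300157"))
}
```

The response body is the token as plain text (AuthResource returns it with media type `text/plain`).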
diff --git a/auth-server/build.gradle b/auth-server/build.gradle index 9f4f2e9b2..a1c13fb54 100644 --- a/auth-server/build.gradle +++ b/auth-server/build.gradle @@ -1,5 +1,5 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "application" id "com.github.johnrengelman.shadow" version "2.0.4" id "idea" @@ -8,7 +8,8 @@ plugins { dependencies { implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion" implementation "io.dropwizard:dropwizard-core:$dropwizardVersion" - implementation 'com.google.firebase:firebase-admin:6.4.0' + implementation project(":firebase-extensions") + testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" testImplementation "org.jetbrains.kotlin:kotlin-test-junit:$kotlinVersion" testRuntimeOnly 'org.hamcrest:hamcrest-all:1.3' diff --git a/auth-server/config/config.yaml b/auth-server/config/config.yaml index 36c72f81e..428ced5ac 100644 --- a/auth-server/config/config.yaml +++ b/auth-server/config/config.yaml @@ -1,5 +1,4 @@ serviceAccountKey: /config/pantel-prod.json -databaseName: pantel-2decb logging: level: INFO diff --git a/auth-server/src/integration-test/resources/config.yaml b/auth-server/src/integration-test/resources/config.yaml index c0c01688c..5bc9240bb 100644 --- a/auth-server/src/integration-test/resources/config.yaml +++ b/auth-server/src/integration-test/resources/config.yaml @@ -1,5 +1,4 @@ serviceAccountKey: config/pantel-prod.json -databaseName: pantel-2decb logging: level: INFO diff --git a/auth-server/src/main/kotlin/org/ostelco/auth/AuthServerApplication.kt b/auth-server/src/main/kotlin/org/ostelco/auth/AuthServerApplication.kt index cb7639b2c..5e689888a 100644 --- a/auth-server/src/main/kotlin/org/ostelco/auth/AuthServerApplication.kt +++ b/auth-server/src/main/kotlin/org/ostelco/auth/AuthServerApplication.kt @@ -1,14 +1,17 @@ package org.ostelco.auth -import com.google.auth.oauth2.GoogleCredentials +import com.fasterxml.jackson.module.kotlin.KotlinModule import com.google.firebase.FirebaseApp import com.google.firebase.FirebaseOptions import io.dropwizard.Application +import io.dropwizard.Configuration +import io.dropwizard.configuration.EnvironmentVariableSubstitutor +import io.dropwizard.configuration.SubstitutingSourceProvider +import io.dropwizard.setup.Bootstrap import io.dropwizard.setup.Environment -import org.ostelco.auth.config.AuthServerConfig import org.ostelco.auth.resources.AuthResource +import org.ostelco.common.firebasex.usingCredentialsFile import org.slf4j.LoggerFactory -import java.io.FileInputStream /** * Entry point for running the authentiation server application @@ -27,6 +30,13 @@ class AuthServerApplication : Application() { override fun getName(): String = "AuthServer" + override fun initialize(bootstrap: Bootstrap) { + bootstrap.configurationSourceProvider = SubstitutingSourceProvider( + bootstrap.configurationSourceProvider, + EnvironmentVariableSubstitutor()) + bootstrap.objectMapper.registerModule(KotlinModule()) + } + /** * Run the dropwizard application (called by the kotlin [main] wrapper). 
*/ @@ -34,15 +44,14 @@ class AuthServerApplication : Application() { config: AuthServerConfig, env: Environment) { - val serviceAccount = FileInputStream(config.serviceAccountKey) - val options = FirebaseOptions.Builder() - .setCredentials(GoogleCredentials.fromStream(serviceAccount)) - .setDatabaseUrl("https://${config.databaseName}.firebaseio.com/") + .usingCredentialsFile(config.serviceAccountKey) .build() FirebaseApp.initializeApp(options) env.jersey().register(AuthResource()) } -} \ No newline at end of file +} + +data class AuthServerConfig(val serviceAccountKey: String) : Configuration() \ No newline at end of file diff --git a/auth-server/src/main/kotlin/org/ostelco/auth/config/AuthServerConfig.kt b/auth-server/src/main/kotlin/org/ostelco/auth/config/AuthServerConfig.kt deleted file mode 100644 index 0242fa9da..000000000 --- a/auth-server/src/main/kotlin/org/ostelco/auth/config/AuthServerConfig.kt +++ /dev/null @@ -1,10 +0,0 @@ -package org.ostelco.auth.config - -import io.dropwizard.Configuration - -class AuthServerConfig : Configuration() { - - var serviceAccountKey = "" - - var databaseName = "" -} diff --git a/auth-server/src/main/kotlin/org/ostelco/auth/resources/AuthResource.kt b/auth-server/src/main/kotlin/org/ostelco/auth/resources/AuthResource.kt index 63143a819..a1dae6970 100644 --- a/auth-server/src/main/kotlin/org/ostelco/auth/resources/AuthResource.kt +++ b/auth-server/src/main/kotlin/org/ostelco/auth/resources/AuthResource.kt @@ -50,9 +50,6 @@ class AuthResource { return Response.ok(customToken, MediaType.TEXT_PLAIN_TYPE).build() } - /** - * As of now, `msisdn` is considered as `user-id`. - * This is subjected to change in future. - */ + // As of now, `msisdn` is considered as `user-id`. This is subjected to change in future. private fun getUid(msisdn: String) = msisdn } diff --git a/bq-metrics-extractor/Dockerfile b/bq-metrics-extractor/Dockerfile index 3eefa70c0..ab899b783 100644 --- a/bq-metrics-extractor/Dockerfile +++ b/bq-metrics-extractor/Dockerfile @@ -1,4 +1,4 @@ -FROM alpine:3.7 +FROM azul/zulu-openjdk:8u181-8.31.0.1 MAINTAINER CSI "csi@telenordigital.com" @@ -18,7 +18,7 @@ COPY build/libs/bq-metrics-extractor-uber.jar /bq-metrics-extractor.jar # classes that should be cached. # -CMD ["java", "-Dfile.encoding=UTF-8", "-Xshare:on", "-Xshare:dump", "-jar", "/bq-metrics-extractor.jar", "quit", "config/config.yaml"] +RUN ["java", "-Dfile.encoding=UTF-8", "-Xshare:on", "-Xshare:dump", "-jar", "/bq-metrics-extractor.jar", "quit", "config/config.yaml"] # diff --git a/bq-metrics-extractor/Dockerfile.test b/bq-metrics-extractor/Dockerfile.test index 312aae869..a9e7587fe 100644 --- a/bq-metrics-extractor/Dockerfile.test +++ b/bq-metrics-extractor/Dockerfile.test @@ -1,4 +1,4 @@ -FROM alpine:3.7 +FROM azul/zulu-openjdk:8u181-8.31.0.1 MAINTAINER CSI "csi@telenordigital.com" @@ -19,7 +19,7 @@ COPY build/libs/bq-metrics-extractor-uber.jar /bq-metrics-extractor.jar # classes that should be cached. 
# -CMD ["java", "-Dfile.encoding=UTF-8", "-Xshare:on", "-Xshare:dump", "-jar", "/bq-metrics-extractor.jar", "quit", "config/config.yaml"] +RUN ["java", "-Dfile.encoding=UTF-8", "-Xshare:on", "-Xshare:dump", "-jar", "/bq-metrics-extractor.jar", "quit", "config/config.yaml"] # diff --git a/bq-metrics-extractor/build.gradle b/bq-metrics-extractor/build.gradle index e8256569e..a5155aa67 100644 --- a/bq-metrics-extractor/build.gradle +++ b/bq-metrics-extractor/build.gradle @@ -1,5 +1,5 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "application" id "com.github.johnrengelman.shadow" version "2.0.4" id "idea" @@ -14,22 +14,21 @@ dependencies { testImplementation "io.dropwizard:dropwizard-client:$dropwizardVersion" testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" testImplementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" - testImplementation "org.mockito:mockito-core:2.18.3" - testImplementation 'org.assertj:assertj-core:3.10.0' + testImplementation "org.mockito:mockito-core:$mockitoVersion" + testImplementation 'org.assertj:assertj-core:3.11.1' implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion" // Bigquery dependency - compile 'com.google.cloud:google-cloud-bigquery:1.40.0' + implementation "com.google.cloud:google-cloud-bigquery:$googleCloudVersion" -runtimeOnly "io.dropwizard:dropwizard-json-logging:$dropwizardVersion" + runtimeOnly "io.dropwizard:dropwizard-json-logging:$dropwizardVersion" // Prometheus pushgateway dependencies (we might not need all of these) - // compile 'io.prometheus:simpleclient:0.5.0' - // compile 'io.prometheus:simpleclient_hotspot:0.5.0' - // compile 'io.prometheus:simpleclient_httpserver:0.5.0' - compile 'io.prometheus:simpleclient_pushgateway:0.5.0' - compile 'com.google.apis:google-api-services-pubsub:v1-rev399-1.25.0' - + // implementation 'io.prometheus:simpleclient:0.5.0' + // implementation 'io.prometheus:simpleclient_hotspot:0.5.0' + // implementation 'io.prometheus:simpleclient_httpserver:0.5.0' + implementation 'io.prometheus:simpleclient_pushgateway:0.5.0' + // implementation 'com.google.apis:google-api-services-pubsub:v1-rev401-1.25.0' } shadowJar { diff --git a/bq-metrics-extractor/config/config.yaml b/bq-metrics-extractor/config/config.yaml index 69a3b60d1..f92f105ce 100644 --- a/bq-metrics-extractor/config/config.yaml +++ b/bq-metrics-extractor/config/config.yaml @@ -29,15 +29,17 @@ bqmetrics: help: Total data used last 24 hours resultColumn: count sql: > - SELECT sum(bucketBytes) AS count FROM `pantel-2decb.data_consumption.raw_consumption` - WHERE timestamp >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 DAY) + SELECT COALESCE ( + (SELECT sum(bucketBytes) AS count FROM `pantel-2decb.data_consumption.raw_consumption` + WHERE timestamp >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 DAY)), 0) as count - type: gauge name: revenue_last24hours help: Revenue for last 24 hours resultColumn: revenue sql: > - SELECT SUM(product.price.amount) as revenue FROM `pantel-2decb.purchases.raw_purchases` - WHERE TIMESTAMP_MILLIS(timestamp) > TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 24 HOUR) + SELECT COALESCE ( + (SELECT SUM(product.price.amount) as revenue FROM `pantel-2decb.purchases.raw_purchases` + WHERE TIMESTAMP_MILLIS(timestamp) > TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 24 HOUR)), 0) as revenue - type: gauge name: total_paid_users help: Number of users who have purchased in last 24 hours @@ -45,3 +47,81 @@ bqmetrics: 
sql: > SELECT COUNT(DISTINCT subscriberId) as count FROM `pantel-2decb.purchases.raw_purchases` WHERE TIMESTAMP_MILLIS(timestamp) > TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 24 HOUR) + - type: gauge + name: active_users_right_now + help: Number of active users right now (with 1 minute interval) + resultColumn: count + sql: > + SELECT count(DISTINCT msisdn) AS count FROM `pantel-2decb.data_consumption.raw_consumption` + WHERE timestamp >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 MINUTE) +#----------------------------------------------- +# Metrics values split at day boundary. +# This is done due to the pseudonymisation of data. +# The keys for data are rotated at the beginning of every week (currently). + - type: gauge + name: sims_who_have_used_data_today + help: Number of SIMs that have used data today + resultColumn: count + sql: > + SELECT count(DISTINCT msisdn) AS count FROM `pantel-2decb.data_consumption.raw_consumption` + WHERE timestamp >= TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) + - type: gauge + name: sims_who_have_used_data_yesterday + help: Number of SIMs that have used data yesterday + resultColumn: count + sql: > + SELECT count(DISTINCT msisdn) AS count FROM `pantel-2decb.data_consumption.raw_consumption` + WHERE timestamp >= TIMESTAMP_SUB(TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY), INTERVAL 1 DAY) + AND timestamp < TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) + + - type: gauge + name: total_data_used_today + help: Total data used today + resultColumn: count + sql: > + SELECT COALESCE ( + (SELECT sum(bucketBytes) AS count FROM `pantel-2decb.data_consumption.raw_consumption` + WHERE timestamp >= TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY)), 0) as count + - type: gauge + name: total_data_used_yesterday + help: Total data used yesterday + resultColumn: count + sql: > + SELECT COALESCE ( + ( SELECT sum(bucketBytes) AS count FROM `pantel-2decb.data_consumption.raw_consumption` + WHERE timestamp >= TIMESTAMP_SUB(TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY), INTERVAL 1 DAY) + AND timestamp < TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) ), 0) as count + + - type: gauge + name: revenue_today + help: Revenue generated today + resultColumn: revenue + sql: > + SELECT COALESCE ( + ( SELECT SUM(product.price.amount) as revenue FROM `pantel-2decb.purchases.raw_purchases` + WHERE TIMESTAMP_MILLIS(timestamp) >= TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) ), 0) as revenue + - type: gauge + name: revenue_yesterday + help: Revenue generated yesterday + resultColumn: revenue + sql: > + SELECT COALESCE ( + ( SELECT SUM(product.price.amount) as revenue FROM `pantel-2decb.purchases.raw_purchases` + WHERE TIMESTAMP_MILLIS(timestamp) >= TIMESTAMP_SUB(TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY), INTERVAL 1 DAY) + AND TIMESTAMP_MILLIS(timestamp) < TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) ), 0) as revenue + + - type: gauge + name: total_paid_users_today + help: Number of users who have purchased today + resultColumn: count + sql: > + SELECT COUNT(DISTINCT subscriberId) as count FROM `pantel-2decb.purchases.raw_purchases` + WHERE TIMESTAMP_MILLIS(timestamp) >= TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) + - type: gauge + name: total_paid_users_yesterday + help: Number of users who have purchased yesterday + resultColumn: count + sql: > + SELECT COUNT(DISTINCT subscriberId) as count FROM `pantel-2decb.purchases.raw_purchases` + WHERE TIMESTAMP_MILLIS(timestamp) >= TIMESTAMP_SUB(TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY), INTERVAL 1 DAY) + AND TIMESTAMP_MILLIS(timestamp) < TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) diff
--git a/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt b/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt index 2f821ac3e..455da297f 100644 --- a/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt +++ b/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt @@ -2,21 +2,25 @@ package org.ostelco.bqmetrics import com.fasterxml.jackson.annotation.JsonProperty -import com.google.cloud.bigquery.* +import com.google.cloud.bigquery.BigQueryOptions +import com.google.cloud.bigquery.Job +import com.google.cloud.bigquery.JobId +import com.google.cloud.bigquery.JobInfo +import com.google.cloud.bigquery.QueryJobConfiguration import io.dropwizard.Application +import io.dropwizard.Configuration +import io.dropwizard.cli.ConfiguredCommand import io.dropwizard.setup.Bootstrap import io.dropwizard.setup.Environment -import io.prometheus.client.exporter.PushGateway import io.prometheus.client.CollectorRegistry -import io.dropwizard.Configuration -import io.dropwizard.cli.ConfiguredCommand import io.prometheus.client.Gauge import io.prometheus.client.Summary +import io.prometheus.client.exporter.PushGateway import net.sourceforge.argparse4j.inf.Namespace import net.sourceforge.argparse4j.inf.Subparser +import org.slf4j.Logger import org.slf4j.LoggerFactory import java.util.* -import org.slf4j.Logger import javax.validation.Valid import javax.validation.constraints.NotNull @@ -65,7 +69,7 @@ fun main(args: Array) { * Config of a single metric that will be extracted using a BigQuery * query. */ -private class MetricConfig { +private class MetricConfig { /** * Type of the metric. Currently the only permitted type is @@ -108,7 +112,7 @@ private class MetricConfig { @Valid @NotNull @JsonProperty - lateinit var sql: String + lateinit var sql: String } @@ -116,7 +120,7 @@ private class MetricConfig { * Configuration for the extractor, default config * plus a list of metrics descriptions. 
*/ -private class BqMetricsExtractorConfig: Configuration() { +private class BqMetricsExtractorConfig : Configuration() { @Valid @NotNull @JsonProperty("bqmetrics") @@ -175,7 +179,6 @@ private interface MetricBuilder { } val count = result.iterateAll().iterator().next().get(resultColumn).longValue - return count } } @@ -190,14 +193,18 @@ private class SummaryMetricBuilder( override fun buildMetric(registry: CollectorRegistry) { - val summary: Summary = Summary.build() - .name(metricName) - .help(help).register(registry) - val value: Long = getNumberValueViaSql(sql, resultColumn) + try { + val summary: Summary = Summary.build() + .name(metricName) + .help(help).register(registry) + val value: Long = getNumberValueViaSql(sql, resultColumn) - log.info("Summarizing metric $metricName to be $value") + log.info("Summarizing metric $metricName to be $value") - summary.observe(value * 1.0) + summary.observe(value * 1.0) + } catch (e: NullPointerException) { + log.error(e.toString()) + } } } @@ -210,14 +217,18 @@ private class GaugeMetricBuilder( private val log: Logger = LoggerFactory.getLogger(SummaryMetricBuilder::class.java) override fun buildMetric(registry: CollectorRegistry) { - val gauge: Gauge = Gauge.build() - .name(metricName) - .help(help).register(registry) - val value: Long = getNumberValueViaSql(sql, resultColumn) + try { + val gauge: Gauge = Gauge.build() + .name(metricName) + .help(help).register(registry) + val value: Long = getNumberValueViaSql(sql, resultColumn) - log.info("Gauge metric $metricName = $value") + log.info("Gauge metric $metricName = $value") - gauge.set(value * 1.0) + gauge.set(value * 1.0) + } catch (e: NullPointerException) { + log.error(e.toString()) + } } } @@ -225,10 +236,10 @@ private class GaugeMetricBuilder( * Thrown when something really bad is detected and it's necessary to terminate * execution immediately. No cleanup of anything will be done. */ -private class BqMetricsExtractionException: RuntimeException { - constructor(message: String, ex: Exception?): super(message, ex) - constructor(message: String): super(message) - constructor(ex: Exception): super(ex) +private class BqMetricsExtractionException : RuntimeException { + constructor(message: String, ex: Exception?) 
: super(message, ex) + constructor(message: String) : super(message) + constructor(ex: Exception) : super(ex) } @@ -241,7 +252,6 @@ private class PrometheusPusher(val pushGateway: String, val job: String) { val registry = CollectorRegistry() - @Throws(Exception::class) fun publishMetrics(metrics: List) { val metricSources: MutableList = mutableListOf() diff --git a/build.gradle b/build.gradle index 9e0df77e7..e319235f3 100644 --- a/build.gradle +++ b/build.gradle @@ -28,14 +28,18 @@ subprojects { options.encoding = 'UTF-8' } ext { - kotlinVersion = "1.2.61" + kotlinVersion = "1.2.70" dropwizardVersion = "1.3.5" - googleCloudVersion = "1.43.0" - jacksonVersion = "2.9.6" + googleCloudVersion = "1.45.0" + jacksonVersion = "2.9.7" stripeVersion = "6.12.0" guavaVersion = "26.0-jre" assertJVersion = "3.11.1" - mockitoVersion = "2.21.0" + mockitoVersion = "2.22.0" + firebaseVersion = "6.5.0" + // Keeping it version 1.15.0 to be consistent with grpc via PubSub client lib + // Keeping it version 1.15.0 to be consistent with netty via Firebase lib + grpcVersion = "1.15.0" } } @@ -59,4 +63,18 @@ task packProd(type: Zip, dependsOn: [':ocsgw:packProd', ':auth-server:pack']) { rename 'docker-compose.prod.yaml','docker-compose.override.yaml' archiveName = 'ostelco-core-prod.zip' destinationDir = file('build/deploy/') -} \ No newline at end of file +} + + +//dependencyUpdates.resolutionStrategy { +// componentSelection { rules -> +// rules.all { ComponentSelection selection -> +// boolean rejected = ['alpha', 'beta', 'rc', 'cr', 'm', 'redhat'].any { qualifier -> +// selection.candidate.version ==~ /(?i).*[.-]${qualifier}[.\d-]*/ +// } +// if (rejected) { +// selection.reject('Release candidate') +// } +// } +// } +//} \ No newline at end of file diff --git a/client-api/build.gradle b/client-api/build.gradle index 1f355da83..f0e158a68 100644 --- a/client-api/build.gradle +++ b/client-api/build.gradle @@ -1,12 +1,12 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "java-library" } dependencies { implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion" - implementation project(":prime-api") + implementation project(":prime-modules") implementation "io.dropwizard:dropwizard-auth:$dropwizardVersion" implementation "io.dropwizard:dropwizard-client:$dropwizardVersion" diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt index b1d3f7105..7f8876c9d 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt @@ -16,6 +16,7 @@ import org.ostelco.prime.client.api.auth.OAuthAuthenticator import org.ostelco.prime.client.api.metrics.reportMetricsAtStartUp import org.ostelco.prime.client.api.resources.AnalyticsResource import org.ostelco.prime.client.api.resources.ApplicationTokenResource +import org.ostelco.prime.client.api.resources.BundlesResource import org.ostelco.prime.client.api.resources.ConsentsResource import org.ostelco.prime.client.api.resources.PaymentResource import org.ostelco.prime.client.api.resources.ProductsResource @@ -77,6 +78,7 @@ class ClientApiModule : PrimeModule { jerseyEnv.register(ReferralResource(dao)) jerseyEnv.register(PaymentResource(dao)) jerseyEnv.register(SubscriptionResource(dao)) + jerseyEnv.register(BundlesResource(dao)) jerseyEnv.register(SubscriptionsResource(dao)) 
jerseyEnv.register(ApplicationTokenResource(dao)) diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/auth/OAuthAuthenticator.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/auth/OAuthAuthenticator.kt index 97e359fbb..07ac2d8ca 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/auth/OAuthAuthenticator.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/auth/OAuthAuthenticator.kt @@ -6,7 +6,7 @@ import com.fasterxml.jackson.databind.ObjectMapper import io.dropwizard.auth.AuthenticationException import io.dropwizard.auth.Authenticator import org.ostelco.prime.client.api.core.UserInfo -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger import java.io.IOException import java.util.* import javax.ws.rs.client.Client @@ -27,11 +27,10 @@ private const val DEFAULT_USER_INFO_ENDPOINT = "https://ostelco.eu.auth0.com/use class OAuthAuthenticator(private val client: Client) : Authenticator { - private val logger by logger() + private val logger by getLogger() private val mapper = ObjectMapper() - @Throws(AuthenticationException::class) override fun authenticate(accessToken: String): Optional { var userInfoEndpoint: String @@ -54,7 +53,6 @@ class OAuthAuthenticator(private val client: Client) : Authenticator + val issuer: String? get() = get("issuer") - val id: Optional + val id: String? get() = get("id") - val email: Optional + val email: String? get() = get("email") - init { - var obj: JsonNode? = null - try { - obj = mapper.readTree(decode(enc)) - } catch (e: JsonParseException) { - logger.error("Parsing of the provided json doc {} failed: {}", enc, e) - } catch (e: IOException) { - logger.error("Unexpected error when parsing the json doc {}: {}", enc, e) - } + private operator fun get(key: String): String? 
= obj.get(key)?.textValue() - this.obj = obj - } - - fun hasIssuer(): Boolean { - return has("issuer") - } - - fun hasId(): Boolean { - return has("id") - } - - fun hasEmail(): Boolean { - return has("email") - } - - private fun has(key: String): Boolean { - return obj != null && obj.has(key) - } - - private operator fun get(key: String): Optional { - return if (has(key)) Optional.of(obj!!.get(key).textValue()) else Optional.empty() - } - - private fun decode(enc: String): String { - return String(Base64.getDecoder().decode(enc)) - } - - override fun toString(): String { - return obj!!.toString() - } + override fun toString(): String = obj.toString() } diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/AnalyticsResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/AnalyticsResource.kt index 279e9c9a9..4de3e8639 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/AnalyticsResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/AnalyticsResource.kt @@ -26,7 +26,7 @@ class AnalyticsResource(private val dao: SubscriberDAO) { } return dao.reportAnalytics(token.name, event).fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { Response.status(Response.Status.CREATED) } ).build() } diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ApplicationTokenResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ApplicationTokenResource.kt index c4dffd877..b743ed398 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ApplicationTokenResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ApplicationTokenResource.kt @@ -29,10 +29,10 @@ class ApplicationTokenResource(private val dao: SubscriberDAO) { } return dao.getMsisdn(authToken.name).fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { msisdn -> dao.storeApplicationToken(msisdn, applicationToken).fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { Response.status(Response.Status.CREATED).entity(asJson(it)) }) }) .build() diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/BundlesResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/BundlesResource.kt new file mode 100644 index 000000000..9096dfb80 --- /dev/null +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/BundlesResource.kt @@ -0,0 +1,27 @@ +package org.ostelco.prime.client.api.resources + +import io.dropwizard.auth.Auth +import org.ostelco.prime.client.api.auth.AccessTokenPrincipal +import org.ostelco.prime.client.api.store.SubscriberDAO +import javax.ws.rs.GET +import javax.ws.rs.Path +import javax.ws.rs.Produces +import javax.ws.rs.core.Response + +@Path("/bundles") +class BundlesResource(private val dao: SubscriberDAO) { + + @GET + @Produces("application/json") + fun getBundles(@Auth token: AccessTokenPrincipal?): Response { + if (token == null) { + return Response.status(Response.Status.UNAUTHORIZED) + .build() + } + + return dao.getBundles(token.name).fold( + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, + { 
Response.status(Response.Status.OK).entity(asJson(it)) }) + .build() + } +} \ No newline at end of file diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ConsentsResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ConsentsResource.kt index 837da0e4c..dab01548c 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ConsentsResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ConsentsResource.kt @@ -29,7 +29,7 @@ class ConsentsResource(private val dao: SubscriberDAO) { } return dao.getConsents(token.name).fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { Response.status(Response.Status.OK).entity(asJson(it)) }) .build() } @@ -54,7 +54,7 @@ class ConsentsResource(private val dao: SubscriberDAO) { } return result.fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { Response.status(Response.Status.OK).entity(asJson(it)) }) .build() } diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PaymentResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PaymentResource.kt index 52f1b0ac0..737bb3440 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PaymentResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PaymentResource.kt @@ -1,10 +1,9 @@ package org.ostelco.prime.client.api.resources import io.dropwizard.auth.Auth -import org.ostelco.prime.logger import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.store.SubscriberDAO -import org.ostelco.prime.module.getResource +import org.ostelco.prime.getLogger import javax.validation.constraints.NotNull import javax.ws.rs.GET import javax.ws.rs.POST @@ -21,7 +20,7 @@ import javax.ws.rs.core.Response @Path("/paymentSources") class PaymentResource(private val dao: SubscriberDAO) { - private val logger by logger() + private val logger by getLogger() @POST @Produces("application/json") @@ -36,7 +35,7 @@ class PaymentResource(private val dao: SubscriberDAO) { return dao.createSource(token.name, sourceId) .fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { sourceInfo -> Response.status(Response.Status.CREATED).entity(sourceInfo)} ).build() } @@ -51,12 +50,13 @@ class PaymentResource(private val dao: SubscriberDAO) { } return dao.listSources(token.name) .fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { sourceList -> Response.status(Response.Status.OK).entity(sourceList)} ).build() } @PUT + @Produces("application/json") fun setDefaultSource(@Auth token: AccessTokenPrincipal?, @NotNull @QueryParam("sourceId") @@ -68,7 +68,7 @@ class PaymentResource(private val dao: SubscriberDAO) { return dao.setDefaultSource(token.name, sourceId) .fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { sourceInfo -> Response.status(Response.Status.OK).entity(sourceInfo)} ).build() } diff --git 
a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ProductsResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ProductsResource.kt index 47be01e8d..b89a8f446 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ProductsResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ProductsResource.kt @@ -29,7 +29,7 @@ class ProductsResource(private val dao: SubscriberDAO) { } return dao.getProducts(token.name).fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { Response.status(Response.Status.OK).entity(asJson(it)) }) .build() } @@ -49,7 +49,7 @@ class ProductsResource(private val dao: SubscriberDAO) { return dao.purchaseProductWithoutPayment(token.name, sku) .fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { productInfo -> Response.status(CREATED).entity(productInfo) } ).build() } @@ -72,7 +72,7 @@ class ProductsResource(private val dao: SubscriberDAO) { return dao.purchaseProduct(token.name, sku, sourceId, saveCard) .fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { productInfo -> Response.status(CREATED).entity(productInfo) } ).build() } diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ProfileResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ProfileResource.kt index 2ad00b7ad..0c7cb1391 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ProfileResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ProfileResource.kt @@ -30,7 +30,7 @@ class ProfileResource(private val dao: SubscriberDAO) { } return dao.getProfile(token.name).fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { Response.status(Response.Status.OK).entity(asJson(it)) }) .build() } @@ -48,7 +48,7 @@ class ProfileResource(private val dao: SubscriberDAO) { } return dao.createProfile(token.name, profile, referredBy).fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { Response.status(Response.Status.CREATED).entity(asJson(it)) }) .build() } @@ -64,7 +64,7 @@ class ProfileResource(private val dao: SubscriberDAO) { } return dao.updateProfile(token.name, profile).fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { Response.status(Response.Status.OK).entity(asJson(it)) }) .build() } diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PurchaseResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PurchaseResource.kt index 1f31ff4f7..b19f3865f 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PurchaseResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PurchaseResource.kt @@ -24,7 +24,7 @@ class PurchaseResource(private val dao: SubscriberDAO) { } return dao.getPurchaseHistory(token.name).fold( - { 
apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { Response.status(Response.Status.OK).entity(asJson(it)) }) .build() } diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ReferralResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ReferralResource.kt index e8a8a27c3..698f48bb3 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ReferralResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ReferralResource.kt @@ -19,7 +19,7 @@ class ReferralResource(private val dao: SubscriberDAO) { } return dao.getReferrals(token.name).fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { Response.status(Response.Status.OK).entity(it) }) .build() } @@ -33,7 +33,7 @@ class ReferralResource(private val dao: SubscriberDAO) { } return dao.getReferredBy(token.name).fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { Response.status(Response.Status.OK).entity(it) }) .build() } diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ResourceHelpers.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ResourceHelpers.kt index 54ad98288..45678206a 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ResourceHelpers.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ResourceHelpers.kt @@ -2,7 +2,7 @@ package org.ostelco.prime.client.api.resources import com.fasterxml.jackson.core.JsonProcessingException import com.fasterxml.jackson.databind.ObjectMapper -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger /** * Common 'helper' functions for resources. 
@@ -14,7 +14,7 @@ fun R.asJson(`object`: Any): String { try { return objectMapper.writeValueAsString(`object`) } catch (e: JsonProcessingException) { - val logger by logger() + val logger by getLogger() logger.error("Error in json response {}", e) } return "" diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResource.kt index 87e7256f4..bd44709bb 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResource.kt @@ -14,7 +14,7 @@ import javax.ws.rs.core.Response */ @Path("/subscription") -@Deprecated("use SubscriptionsResource", ReplaceWith("SubscriptionsResource", "org.ostelco.prime.client.api.resources.SubscriptionsResource")) +@Deprecated("use SubscriptionsResource and/or BundlesResource", ReplaceWith("SubscriptionsResource", "org.ostelco.prime.client.api.resources.SubscriptionsResource")) class SubscriptionResource(private val dao: SubscriberDAO) { @GET @@ -27,7 +27,7 @@ class SubscriptionResource(private val dao: SubscriberDAO) { } return dao.getSubscriptionStatus(token.name).fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { Response.status(Response.Status.OK).entity(asJson(it)) }) .build() } @@ -42,7 +42,7 @@ class SubscriptionResource(private val dao: SubscriberDAO) { } return dao.getActivePseudonymOfMsisdnForSubscriber(token.name).fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { pseudonym -> Response.status(Response.Status.OK).entity(pseudonym) }) .build() } @@ -60,7 +60,7 @@ class SubscriptionsResource(private val dao: SubscriberDAO) { } return dao.getSubscriptions(token.name).fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, { Response.status(Response.Status.OK).entity(asJson(it)) }) .build() } diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAO.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAO.kt index 45bf42fb5..11b750282 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAO.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAO.kt @@ -4,16 +4,18 @@ import arrow.core.Either import org.ostelco.prime.client.api.model.Consent import org.ostelco.prime.client.api.model.Person import org.ostelco.prime.client.api.model.SubscriptionStatus -import org.ostelco.prime.core.ApiError +import org.ostelco.prime.apierror.ApiError import org.ostelco.prime.model.ActivePseudonyms import org.ostelco.prime.model.ApplicationToken +import org.ostelco.prime.model.Bundle import org.ostelco.prime.model.Product import org.ostelco.prime.model.PurchaseRecord import org.ostelco.prime.model.Subscriber import org.ostelco.prime.model.Subscription import org.ostelco.prime.paymentprocessor.core.ProductInfo -import org.ostelco.prime.paymentprocessor.core.ProfileInfo import org.ostelco.prime.paymentprocessor.core.SourceInfo +import org.ostelco.prime.paymentprocessor.core.SourceDetailsInfo + /** * @@ -31,6 +33,8 @@ interface SubscriberDAO { fun 
getSubscriptions(subscriberId: String): Either> + fun getBundles(subscriberId: String): Either> + fun getPurchaseHistory(subscriberId: String): Either> fun getProduct(subscriptionId: String, sku: String): Either @@ -51,10 +55,6 @@ interface SubscriberDAO { fun storeApplicationToken(msisdn: String, applicationToken: ApplicationToken): Either - fun getPaymentProfile(name: String): Either - - fun setPaymentProfile(name: String, profileInfo: ProfileInfo): Either - fun getReferrals(subscriberId: String): Either> fun getReferredBy(subscriberId: String): Either @@ -63,7 +63,7 @@ interface SubscriberDAO { fun setDefaultSource(subscriberId: String, sourceId: String): Either - fun listSources(subscriberId: String): Either> + fun listSources(subscriberId: String): Either> companion object { diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt index 6e617bfa6..3280f5e23 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt @@ -8,15 +8,18 @@ import org.ostelco.prime.client.api.metrics.updateMetricsOnNewSubscriber import org.ostelco.prime.client.api.model.Consent import org.ostelco.prime.client.api.model.Person import org.ostelco.prime.client.api.model.SubscriptionStatus -import org.ostelco.prime.core.ApiError -import org.ostelco.prime.core.BadGatewayError -import org.ostelco.prime.core.BadRequestError -import org.ostelco.prime.core.ForbiddenError -import org.ostelco.prime.core.InsuffientStorageError -import org.ostelco.prime.core.NotFoundError -import org.ostelco.prime.logger +import org.ostelco.prime.apierror.ApiError +import org.ostelco.prime.apierror.ApiErrorCode +import org.ostelco.prime.apierror.BadGatewayError +import org.ostelco.prime.apierror.BadRequestError +import org.ostelco.prime.apierror.InsufficientStorageError +import org.ostelco.prime.apierror.NotFoundError +import org.ostelco.prime.apierror.mapPaymentErrorToApiError +import org.ostelco.prime.apierror.mapStorageErrorToApiError +import org.ostelco.prime.getLogger import org.ostelco.prime.model.ActivePseudonyms import org.ostelco.prime.model.ApplicationToken +import org.ostelco.prime.model.Bundle import org.ostelco.prime.model.Product import org.ostelco.prime.model.PurchaseRecord import org.ostelco.prime.model.Subscriber @@ -26,9 +29,11 @@ import org.ostelco.prime.ocs.OcsSubscriberService import org.ostelco.prime.paymentprocessor.PaymentProcessor import org.ostelco.prime.paymentprocessor.core.ProductInfo import org.ostelco.prime.paymentprocessor.core.ProfileInfo +import org.ostelco.prime.paymentprocessor.core.SourceDetailsInfo import org.ostelco.prime.paymentprocessor.core.SourceInfo import org.ostelco.prime.pseudonymizer.PseudonymizerService import org.ostelco.prime.storage.ClientDataSource +import org.ostelco.prime.storage.StoreError import java.time.Instant import java.util.* import java.util.concurrent.ConcurrentHashMap @@ -38,7 +43,7 @@ import java.util.concurrent.ConcurrentHashMap */ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSubscriberService: OcsSubscriberService) : SubscriberDAO { - private val logger by logger() + private val logger by getLogger() private val paymentProcessor by lazy { getResource() } private val pseudonymizer by lazy { getResource() } @@ -50,43 +55,45 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, 
private val ocsSu override fun getProfile(subscriberId: String): Either { return try { storage.getSubscriber(subscriberId).mapLeft { - BadRequestError("Incomplete profile description. ${it.message}") - } + NotFoundError("Failed to fetch profile.", ApiErrorCode.FAILED_TO_FETCH_PAYMENT_PROFILE, it) + } } catch (e: Exception) { logger.error("Failed to fetch profile for subscriberId $subscriberId", e) - Either.left(NotFoundError("Failed to fetch profile")) + Either.left(NotFoundError("Failed to fetch profile", ApiErrorCode.FAILED_TO_FETCH_PAYMENT_PROFILE)) } } override fun createProfile(subscriberId: String, profile: Subscriber, referredBy: String?): Either { if (!SubscriberDAO.isValidProfile(profile)) { logger.error("Failed to create profile. Invalid profile.") - return Either.left(BadRequestError("Incomplete profile description")) + return Either.left(BadRequestError("Incomplete profile description. Profile must contain name and email", ApiErrorCode.FAILED_TO_CREATE_PAYMENT_PROFILE)) } return try { storage.addSubscriber(profile, referredBy) - .mapLeft { ForbiddenError("Failed to create profile. ${it.message}") } + .mapLeft { + mapStorageErrorToApiError("Failed to create profile.", ApiErrorCode.FAILED_TO_CREATE_PAYMENT_PROFILE, it) + } .flatMap { updateMetricsOnNewSubscriber() getProfile(subscriberId) } } catch (e: Exception) { logger.error("Failed to create profile for subscriberId $subscriberId", e) - Either.left(ForbiddenError("Failed to create profile")) + Either.left(BadGatewayError("Failed to create profile", ApiErrorCode.FAILED_TO_CREATE_PAYMENT_PROFILE)) } } override fun storeApplicationToken(msisdn: String, applicationToken: ApplicationToken): Either { if (!SubscriberDAO.isValidApplicationToken(applicationToken)) { - return Either.left(BadRequestError("Incomplete ApplicationToken")) + return Either.left(BadRequestError("Incomplete ApplicationToken", ApiErrorCode.FAILED_TO_STORE_APPLICATION_TOKEN)) } try { storage.addNotificationToken(msisdn, applicationToken) } catch (e: Exception) { logger.error("Failed to store ApplicationToken for msisdn $msisdn", e) - return Either.left(InsuffientStorageError("Failed to store ApplicationToken")) + return Either.left(InsufficientStorageError("Failed to store ApplicationToken", ApiErrorCode.FAILED_TO_STORE_APPLICATION_TOKEN)) } return getNotificationToken(msisdn, applicationToken.applicationID) } @@ -95,113 +102,117 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu try { return storage.getNotificationToken(msisdn, applicationId) ?.let { Either.right(it) } - ?: return Either.left(NotFoundError("Failed to get ApplicationToken")) + ?: return Either.left(NotFoundError("Failed to get ApplicationToken", ApiErrorCode.FAILED_TO_STORE_APPLICATION_TOKEN)) } catch (e: Exception) { logger.error("Failed to get ApplicationToken for msisdn $msisdn", e) - return Either.left(NotFoundError("Failed to get ApplicationToken")) + return Either.left(BadGatewayError("Failed to get ApplicationToken", ApiErrorCode.FAILED_TO_STORE_APPLICATION_TOKEN)) } } override fun updateProfile(subscriberId: String, profile: Subscriber): Either { if (!SubscriberDAO.isValidProfile(profile)) { - return Either.left(BadRequestError("Incomplete profile description")) + return Either.left(BadRequestError("Incomplete profile description", ApiErrorCode.FAILED_TO_UPDATE_PROFILE)) } try { storage.updateSubscriber(profile) } catch (e: Exception) { logger.error("Failed to update profile for subscriberId $subscriberId", e) - return Either.left(NotFoundError("Failed to 
update profile")) + return Either.left(BadGatewayError("Failed to update profile", ApiErrorCode.FAILED_TO_UPDATE_PROFILE)) } return getProfile(subscriberId) } override fun getSubscriptionStatus(subscriberId: String): Either { - try { - return storage.getBundles(subscriberId) - .map { bundles -> bundles?.first()?.balance ?: 0 } + return try { + storage.getBundles(subscriberId) + .map { bundles -> bundles.firstOrNull()?.balance ?: 0 } .flatMap { balance -> storage.getPurchaseRecords(subscriberId) .map { purchaseRecords -> SubscriptionStatus(balance, purchaseRecords.toList()) } } - .mapLeft { NotFoundError(it.message) } + .mapLeft { + mapStorageErrorToApiError("Failed to fetch subscription status.", ApiErrorCode.FAILED_TO_FETCH_SUBSCRIPTION_STATUS, it) + } } catch (e: Exception) { logger.error("Failed to get balance for subscriber $subscriberId", e) - return Either.left(NotFoundError("Failed to get balance")) + return Either.left(BadGatewayError("Failed to get balance", ApiErrorCode.FAILED_TO_FETCH_SUBSCRIPTION_STATUS)) } } override fun getSubscriptions(subscriberId: String): Either> { try { return storage.getSubscriptions(subscriberId).mapLeft { - NotFoundError("Failed to get subscriptions. ${it.message}") + NotFoundError("Failed to get subscriptions.", ApiErrorCode.FAILED_TO_FETCH_SUBSCRIPTIONS, it) } } catch (e: Exception) { logger.error("Failed to get subscriptions for subscriberId $subscriberId", e) - return Either.left(NotFoundError("Failed to get subscriptions")) + return Either.left(BadGatewayError("Failed to get subscriptions", ApiErrorCode.FAILED_TO_FETCH_SUBSCRIPTIONS)) + } + } + + override fun getBundles(subscriberId: String): Either> { + return try { + storage.getBundles(subscriberId).mapLeft { + NotFoundError("Failed to get bundles. ${it.message}", ApiErrorCode.FAILED_TO_FETCH_BUNDLES) + } + } catch (e: Exception) { + logger.error("Failed to get bundles for subscriberId $subscriberId", e) + Either.left(NotFoundError("Failed to get bundles", ApiErrorCode.FAILED_TO_FETCH_BUNDLES)) } } override fun getActivePseudonymOfMsisdnForSubscriber(subscriberId: String): Either { return storage.getMsisdn(subscriberId) - .mapLeft { NotFoundError("Failed to msisdn for user. ${it.message}") } + .mapLeft { NotFoundError("Failed to get pseudonym for user.", ApiErrorCode.FAILED_TO_FETCH_PSEUDONYM_FOR_SUBSCRIBER, it) } .map { msisdn -> pseudonymizer.getActivePseudonymsForMsisdn(msisdn) } } override fun getPurchaseHistory(subscriberId: String): Either> { return try { return storage.getPurchaseRecords(subscriberId).bimap( - { NotFoundError("Failed to get purchase history. ${it.message}") }, + { NotFoundError("Failed to get purchase history.", ApiErrorCode.FAILED_TO_FETCH_PAYMENT_HISTORY, it) }, { it.toList() }) } catch (e: Exception) { logger.error("Failed to get purchase history for subscriberId $subscriberId", e) - Either.left(NotFoundError("Failed to get purchase history")) + Either.left(BadGatewayError("Failed to get purchase history", ApiErrorCode.FAILED_TO_FETCH_PAYMENT_HISTORY)) } } override fun getMsisdn(subscriberId: String): Either { return try { storage.getMsisdn(subscriberId).mapLeft { - NotFoundError("Did not find msisdn for this subscription. 
${it.message}") + NotFoundError("Did not find msisdn for this subscription.", ApiErrorCode.FAILED_TO_STORE_APPLICATION_TOKEN, it) } } catch (e: Exception) { logger.error("Did not find msisdn for subscriberId $subscriberId", e) - Either.left(NotFoundError("Did not find subscription")) + Either.left(BadGatewayError("Did not find subscription", ApiErrorCode.FAILED_TO_STORE_APPLICATION_TOKEN)) } } override fun getProducts(subscriberId: String): Either> { return try { storage.getProducts(subscriberId).bimap( - { NotFoundError(it.message) }, + { NotFoundError("Failed to fetch products", ApiErrorCode.FAILED_TO_FETCH_PRODUCT_LIST, it) }, { products -> products.values }) } catch (e: Exception) { logger.error("Failed to get Products for subscriberId $subscriberId", e) - Either.left(NotFoundError("Failed to get Products")) + Either.left(BadGatewayError("Failed to get Products", ApiErrorCode.FAILED_TO_FETCH_PRODUCT_LIST)) } } override fun getProduct(subscriptionId: String, sku: String): Either { return storage.getProduct(subscriptionId, sku) - .fold({ Either.left(NotFoundError("Failed to get products for sku $sku")) }, + .fold({ Either.left(NotFoundError("Failed to get products for sku $sku", ApiErrorCode.FAILED_TO_FETCH_PRODUCT_INFORMATION)) }, { Either.right(it) }) } - private fun createAndStorePaymentProfile(name: String): Either { - return paymentProcessor.createPaymentProfile(name) - .mapLeft { ForbiddenError(it.description) } - .flatMap { profileInfo -> - setPaymentProfile(name, profileInfo) - .map { profileInfo } - } - } - @Deprecated("use purchaseProduct", ReplaceWith("purchaseProduct")) - override fun purchaseProductWithoutPayment(subscriberId: String, sku: String): Either { + override fun purchaseProductWithoutPayment(subscriberId: String, sku: String): Either { return getProduct(subscriberId, sku) // If we can't find the product, return not-found - .mapLeft { NotFoundError("Product unavailable") } + .mapLeft { NotFoundError("Product unavailable", ApiErrorCode.FAILED_TO_PURCHASE_PRODUCT) } .flatMap { product -> val purchaseRecord = PurchaseRecord( id = UUID.randomUUID().toString(), @@ -212,7 +223,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu storage.addPurchaseRecord(subscriberId, purchaseRecord) .mapLeft { storeError -> logger.error("failed to save purchase record, for subscriberId $subscriberId, sku $sku") - BadGatewayError(storeError.message) + BadGatewayError("Failed to store purchase record", ApiErrorCode.FAILED_TO_PURCHASE_PRODUCT, storeError) } // Notify OCS .flatMap { @@ -234,37 +245,37 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu sku: String, sourceId: String?, saveCard: Boolean): Either = - storage.purchaseProduct( - subscriberId, - sku, - sourceId, - saveCard).mapLeft { NotFoundError(it.description) } + storage.purchaseProduct( + subscriberId, + sku, + sourceId, + saveCard).mapLeft { mapPaymentErrorToApiError("Failed to purchase product. ", ApiErrorCode.FAILED_TO_PURCHASE_PRODUCT, it) } override fun getReferrals(subscriberId: String): Either> { return try { storage.getReferrals(subscriberId).bimap( - { NotFoundError("Failed to get referral list. 
${it.message}") }, + { NotFoundError("Failed to get referral list.", ApiErrorCode.FAILED_TO_FETCH_REFERRALS, it) }, { list -> list.map { Person(it) } }) } catch (e: Exception) { logger.error("Failed to get referral list for subscriberId $subscriberId", e) - Either.left(NotFoundError("Failed to get referral list")) + Either.left(BadGatewayError("Failed to get referral list", ApiErrorCode.FAILED_TO_FETCH_REFERRALS)) } } override fun getReferredBy(subscriberId: String): Either { return try { storage.getReferredBy(subscriberId).bimap( - { NotFoundError("Failed to get referred-by. ${it.message}") }, + { NotFoundError("Failed to get referred-by.", ApiErrorCode.FAILED_TO_FETCH_REFERRED_BY_LIST, it) }, { Person(name = it) }) } catch (e: Exception) { logger.error("Failed to get referred-by for subscriberId $subscriberId", e) - Either.left(NotFoundError("Failed to get referred-by")) + Either.left(BadGatewayError("Failed to get referred-by", ApiErrorCode.FAILED_TO_FETCH_REFERRED_BY_LIST)) } } override fun getConsents(subscriberId: String): Either> { consentMap.putIfAbsent(subscriberId, ConcurrentHashMap()) - consentMap[subscriberId]!!.putIfAbsent("privacy", false) + consentMap[subscriberId]?.putIfAbsent("privacy", false) return Either.right(listOf(Consent( consentId = "privacy", description = "Grant permission to process personal data", @@ -273,56 +284,71 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu override fun acceptConsent(subscriberId: String, consentId: String): Either { consentMap.putIfAbsent(subscriberId, ConcurrentHashMap()) - consentMap[subscriberId]!![consentId] = true + consentMap[subscriberId]?.put(consentId, true) return Either.right(Consent(consentId, "Grant permission to process personal data", true)) } override fun rejectConsent(subscriberId: String, consentId: String): Either { consentMap.putIfAbsent(subscriberId, ConcurrentHashMap()) - consentMap[subscriberId]!![consentId] = false + consentMap[subscriberId]?.put(consentId, false) return Either.right(Consent(consentId, "Grant permission to process personal data", false)) } - override fun getPaymentProfile(name: String): Either = + private fun getPaymentProfile(name: String): Either = storage.getPaymentId(name) ?.let { profileInfoId -> Either.right(ProfileInfo(profileInfoId)) } - ?: Either.left(BadGatewayError("Failed to fetch payment customer ID")) + ?: Either.left(org.ostelco.prime.storage.NotFoundError("Failed to fetch payment customer ID", name)) - override fun setPaymentProfile(name: String, profileInfo: ProfileInfo): Either = - Either.cond( - test = storage.createPaymentId(name, profileInfo.id), - ifTrue = { Unit }, - ifFalse = { BadGatewayError("Failed to save payment customer ID") }) + private fun setPaymentProfile(name: String, profileInfo: ProfileInfo): Either = + Either.cond( + test = storage.createPaymentId(name, profileInfo.id), + ifTrue = { Unit }, + ifFalse = { org.ostelco.prime.storage.NotCreatedError("Failed to store payment customer ID") }) override fun reportAnalytics(subscriberId: String, events: String): Either = Either.right(Unit) override fun createSource(subscriberId: String, sourceId: String): Either { - return getPaymentProfile(subscriberId) + return paymentProcessor.getPaymentProfile(subscriberId) .fold( - { createAndStorePaymentProfile(subscriberId) }, + { + paymentProcessor.createPaymentProfile(subscriberId) + .mapLeft { error -> mapPaymentErrorToApiError(error.description, ApiErrorCode.FAILED_TO_STORE_PAYMENT_SOURCE, error) } + }, { profileInfo -> 
Either.right(profileInfo) } ) - .flatMap { profileInfo -> paymentProcessor.addSource(profileInfo.id, sourceId).mapLeft { NotFoundError(it.description) } } + .flatMap { profileInfo -> + paymentProcessor.addSource(profileInfo.id, sourceId) + .mapLeft { mapPaymentErrorToApiError("Failed to store payment source", ApiErrorCode.FAILED_TO_STORE_PAYMENT_SOURCE, it) } + } } override fun setDefaultSource(subscriberId: String, sourceId: String): Either { - return getPaymentProfile(subscriberId) + return paymentProcessor.getPaymentProfile(subscriberId) .fold( - { createAndStorePaymentProfile(subscriberId) }, + { + paymentProcessor.createPaymentProfile(subscriberId) + .mapLeft { error -> mapPaymentErrorToApiError(error.description, ApiErrorCode.FAILED_TO_SET_DEFAULT_PAYMENT_SOURCE, error) } + }, { profileInfo -> Either.right(profileInfo) } ) - .flatMap { profileInfo -> paymentProcessor.setDefaultSource(profileInfo.id, sourceId).mapLeft { NotFoundError(it.description) } } + .flatMap { profileInfo -> + paymentProcessor.setDefaultSource(profileInfo.id, sourceId) + .mapLeft { mapPaymentErrorToApiError("Failed to set default payment source", ApiErrorCode.FAILED_TO_SET_DEFAULT_PAYMENT_SOURCE, it) } + } } - override fun listSources(subscriberId: String): Either> { - return getPaymentProfile(subscriberId) + override fun listSources(subscriberId: String): Either> { + return paymentProcessor.getPaymentProfile(subscriberId) .fold( - { createAndStorePaymentProfile(subscriberId) }, + { + paymentProcessor.createPaymentProfile(subscriberId) + .mapLeft { error -> mapPaymentErrorToApiError(error.description, ApiErrorCode.FAILED_TO_FETCH_PAYMENT_SOURCES_LIST, error) } + }, { profileInfo -> Either.right(profileInfo) } ) - .flatMap { profileInfo -> paymentProcessor.getSavedSources(profileInfo.id).mapLeft { NotFoundError(it.description) } } - + .flatMap { profileInfo -> + paymentProcessor.getSavedSources(profileInfo.id) + .mapLeft { mapPaymentErrorToApiError("Failed to list sources", ApiErrorCode.FAILED_TO_FETCH_PAYMENT_SOURCES_LIST, it) } + } } - - } diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/auth/GetUserInfoTest.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/auth/GetUserInfoTest.kt index b8bd22a42..bb17bc232 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/auth/GetUserInfoTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/auth/GetUserInfoTest.kt @@ -28,7 +28,6 @@ class GetUserInfoTest { "http://localhost:${RULE.localPort}/userinfo") @Test - @Throws(Exception::class) fun getProfileNotFound() { @@ -39,7 +38,7 @@ class GetUserInfoTest { waitForServer() - val response = client!!.target( + val response = client.target( "http://localhost:${RULE.localPort}/profile") .request() .property(ClientProperties.CONNECT_TIMEOUT, 30000) @@ -55,7 +54,7 @@ class GetUserInfoTest { var counter = 40 // Max wait time, ten seconds. while (counter > 0) { try { - val r = client!!.target( + val r = client.target( "http://localhost:${RULE.adminPort}/healthcheck") .request() .get(Response::class.java) @@ -78,7 +77,7 @@ class GetUserInfoTest { companion object { private val key = "secret" - private var client: Client? 
= null + private lateinit var client: Client @JvmField @ClassRule diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/auth/helpers/TestApp.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/auth/helpers/TestApp.kt index 8af25a884..196db835a 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/auth/helpers/TestApp.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/auth/helpers/TestApp.kt @@ -20,8 +20,8 @@ import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.auth.OAuthAuthenticator import org.ostelco.prime.client.api.resources.ProfileResource import org.ostelco.prime.client.api.store.SubscriberDAO -import org.ostelco.prime.core.NotFoundError -import java.io.IOException +import org.ostelco.prime.apierror.ApiErrorCode +import org.ostelco.prime.apierror.NotFoundError class TestApp : Application() { @@ -29,20 +29,19 @@ class TestApp : Application() { return "test" } - override fun initialize(bootstrap: Bootstrap?) { - bootstrap!!.configurationSourceProvider = SubstitutingSourceProvider( + override fun initialize(bootstrap: Bootstrap) { + bootstrap.configurationSourceProvider = SubstitutingSourceProvider( bootstrap.configurationSourceProvider, EnvironmentVariableSubstitutor()) } - @Throws(IOException::class) override fun run(config: TestConfig, env: Environment) { val DAO = mock(SubscriberDAO::class.java) val arg = argumentCaptor() `when`(DAO.getProfile(arg.capture())) - .thenReturn(Either.left(NotFoundError("No profile found"))) + .thenReturn(Either.left(NotFoundError("No profile found", ApiErrorCode.FAILED_TO_FETCH_PAYMENT_PROFILE))) /* APIs. */ env.jersey().register(ProfileResource(DAO)) @@ -57,7 +56,7 @@ class TestApp : Application() { /* OAuth2 with cache. */ val authenticator = CachingAuthenticator(env.metrics(), OAuthAuthenticator(client), - config.authenticationCachePolicy!!) + config.authenticationCachePolicy) /* OAuth2. */ env.jersey().register(AuthDynamicFeature( diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/auth/helpers/TestConfig.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/auth/helpers/TestConfig.kt index fbc8718ba..bb96d8ca7 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/auth/helpers/TestConfig.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/auth/helpers/TestConfig.kt @@ -12,12 +12,12 @@ class TestConfig : Configuration() { @Valid @get:JsonProperty("secret") @set:JsonProperty("secret") - var secret: String? = null + lateinit var secret: String @Valid @NotNull @get:JsonProperty("authenticationCachePolicy") - var authenticationCachePolicy: CacheBuilderSpec? 
= null + lateinit var authenticationCachePolicy: CacheBuilderSpec private set @Valid diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/AnalyticsResourceTest.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/AnalyticsResourceTest.kt index 9b95a522c..f44d50879 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/AnalyticsResourceTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/AnalyticsResourceTest.kt @@ -37,14 +37,12 @@ class AnalyticsResourceTest { private val email = "mw@internet.org" @Before - @Throws(Exception::class) fun setUp() { `when`(AUTHENTICATOR.authenticate(ArgumentMatchers.anyString())) .thenReturn(Optional.of(AccessTokenPrincipal(email))) } @Test - @Throws(Exception::class) fun reportAnalytics() { val arg1 = argumentCaptor() val arg2 = argumentCaptor() @@ -52,17 +50,17 @@ class AnalyticsResourceTest { `when`(DAO.reportAnalytics(arg1.capture(), arg2.capture())).thenReturn(Either.right(Unit)) val events = """ - |[ - | { - | "eventType": "PURCHASES_A_PRODUCT", - | "sku": "1", - | "time": "1524734549" - | }, - | { - | "eventType": "EXITS_APPLICATION", - | "time": "1524742549" - | } - |]""".trimMargin() + [ + { + "eventType": "PURCHASES_A_PRODUCT", + "sku": "1", + "time": "1524734549" + }, + { + "eventType": "EXITS_APPLICATION", + "time": "1524742549" + } + ]""".trimIndent() assertThat(isValidJson(events)).isTrue() diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ApplicationTokenResourceTest.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ApplicationTokenResourceTest.kt index e68cfe8e6..abe3ca9e6 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ApplicationTokenResourceTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ApplicationTokenResourceTest.kt @@ -20,7 +20,7 @@ import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.auth.OAuthAuthenticator import org.ostelco.prime.client.api.store.SubscriberDAO import org.ostelco.prime.client.api.util.AccessToken -import org.ostelco.prime.core.ApiError +import org.ostelco.prime.apierror.ApiError import org.ostelco.prime.model.ApplicationToken import java.util.* import javax.ws.rs.client.Client @@ -46,14 +46,12 @@ class ApplicationTokenResourceTest { tokenType = tokenType) @Before - @Throws(Exception::class) fun setUp() { `when`(AUTHENTICATOR.authenticate(ArgumentMatchers.anyString())) .thenReturn(Optional.of(AccessTokenPrincipal(email))) } @Test - @Throws(Exception::class) fun storeApplicationToken() { val arg1 = argumentCaptor() val arg2 = argumentCaptor() @@ -69,11 +67,11 @@ class ApplicationTokenResourceTest { .request(MediaType.APPLICATION_JSON) .accept(MediaType.APPLICATION_JSON) .header("Authorization", "Bearer ${AccessToken.withEmail(email)}") - .post(Entity.json("{\n" + - " \"token\": \"" + token + "\",\n" + - " \"applicationID\": \"" + applicationID + "\",\n" + - " \"tokenType\": \"" + tokenType + "\"\n" + - "}\n")) + .post(Entity.json("""{ + "token": "$token", + "applicationID": "$applicationID", + "tokenType": "$tokenType" + }""".trimIndent())) assertThat(resp.status).isEqualTo(Response.Status.CREATED.statusCode) assertThat(resp.mediaType.toString()).isEqualTo(MediaType.APPLICATION_JSON) diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ConsentsResourceTest.kt 
b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ConsentsResourceTest.kt index 555206428..fa4a6eb02 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ConsentsResourceTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ConsentsResourceTest.kt @@ -19,7 +19,8 @@ import org.ostelco.prime.client.api.auth.OAuthAuthenticator import org.ostelco.prime.client.api.model.Consent import org.ostelco.prime.client.api.store.SubscriberDAO import org.ostelco.prime.client.api.util.AccessToken -import org.ostelco.prime.core.NotFoundError +import org.ostelco.prime.apierror.ApiErrorCode +import org.ostelco.prime.apierror.NotFoundError import java.util.* import javax.ws.rs.client.Entity import javax.ws.rs.core.GenericType @@ -39,14 +40,12 @@ class ConsentsResourceTest { Consent("2", "blabla", true)) @Before - @Throws(Exception::class) fun setUp() { `when`(AUTHENTICATOR.authenticate(ArgumentMatchers.anyString())) .thenReturn(Optional.of(AccessTokenPrincipal(email))) } @Test - @Throws(Exception::class) fun getConsents() { val arg = argumentCaptor() @@ -66,7 +65,6 @@ class ConsentsResourceTest { } @Test - @Throws(Exception::class) fun acceptConsent() { val arg1 = argumentCaptor() val arg2 = argumentCaptor() @@ -75,7 +73,7 @@ class ConsentsResourceTest { `when`(DAO.acceptConsent(arg1.capture(), arg2.capture())).thenReturn(Either.right(consents[0])) `when`(DAO.rejectConsent(arg1.capture(), arg2.capture())).thenReturn(Either.left( - NotFoundError("No consents found"))) + NotFoundError("No consents found", ApiErrorCode.FAILED_TO_FETCH_CONSENT))) val resp = RULE.target("/consents/$consentId") .queryParam("accepted", true) @@ -89,7 +87,6 @@ class ConsentsResourceTest { } @Test - @Throws(Exception::class) fun rejectConsent() { val arg1 = argumentCaptor() val arg2 = argumentCaptor() @@ -97,7 +94,7 @@ class ConsentsResourceTest { val consentId = consents[0].consentId `when`(DAO.acceptConsent(arg1.capture(), arg2.capture())).thenReturn(Either.left( - NotFoundError("No consents found"))) + NotFoundError("No consents found", ApiErrorCode.FAILED_TO_FETCH_CONSENT))) `when`(DAO.rejectConsent(arg1.capture(), arg2.capture())).thenReturn(Either.right(consents[0])) val resp = RULE.target("/consents/$consentId") diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ProductsResourceTest.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ProductsResourceTest.kt index af9ca85a7..275e0f43f 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ProductsResourceTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ProductsResourceTest.kt @@ -21,7 +21,7 @@ import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.auth.OAuthAuthenticator import org.ostelco.prime.client.api.store.SubscriberDAO import org.ostelco.prime.client.api.util.AccessToken -import org.ostelco.prime.core.ApiError +import org.ostelco.prime.apierror.ApiError import org.ostelco.prime.model.Price import org.ostelco.prime.model.Product import org.ostelco.prime.paymentprocessor.PaymentProcessor @@ -50,21 +50,19 @@ class ProductsResourceTest { Product("3", Price(20, "NOK"), emptyMap(), emptyMap())) private val userInfo = Base64.getEncoder().encodeToString( - """|{ - | "issuer": "someone", - | "email": "mw@internet.org" - |}""".trimMargin() + """{ + "issuer": "someone", + "email": "mw@internet.org" + }""".trimIndent() .toByteArray()) @Before - @Throws(Exception::class) fun 
setUp() { `when`(AUTHENTICATOR.authenticate(ArgumentMatchers.anyString())) .thenReturn(Optional.of(AccessTokenPrincipal(email))) } @Test - @Throws(Exception::class) fun getProducts() { val arg = argumentCaptor() @@ -87,7 +85,6 @@ class ProductsResourceTest { } @Test - @Throws(Exception::class) fun purchaseProduct() { val emailArg = argumentCaptor() val skuArg = argumentCaptor() diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ProfileResourceTest.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ProfileResourceTest.kt index 105c62ff1..d9312ff69 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ProfileResourceTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ProfileResourceTest.kt @@ -41,14 +41,12 @@ class ProfileResourceTest { private val profile = Subscriber(email) @Before - @Throws(Exception::class) fun setUp() { `when`(AUTHENTICATOR.authenticate(ArgumentMatchers.anyString())) .thenReturn(Optional.of(AccessTokenPrincipal(email))) } @Test - @Throws(Exception::class) fun getProfile() { val arg = argumentCaptor() @@ -67,7 +65,6 @@ class ProfileResourceTest { } @Test - @Throws(Exception::class) fun createProfile() { val arg1 = argumentCaptor() val arg2 = argumentCaptor() @@ -80,13 +77,13 @@ class ProfileResourceTest { val resp = RULE.target("/profile") .request(MediaType.APPLICATION_JSON) .header("Authorization", "Bearer ${AccessToken.withEmail(email)}") - .post(Entity.json("{\n" + - " \"name\": \"" + name + "\",\n" + - " \"address\": \"" + address + "\",\n" + - " \"postCode\": \"" + postCode + "\",\n" + - " \"city\": \"" + city + "\",\n" + - " \"email\": \"" + email + "\"\n" + - "}\n")) + .post(Entity.json("""{ + "name": "$name", + "address": "$address", + "postCode": "$postCode", + "city": "$city", + "email": "$email" + }""".trimIndent())) assertThat(resp.status).isEqualTo(Response.Status.CREATED.statusCode) assertThat(resp.mediaType.toString()).isEqualTo(MediaType.APPLICATION_JSON) @@ -100,7 +97,6 @@ class ProfileResourceTest { } @Test - @Throws(Exception::class) fun createProfileWithReferral() { val arg1 = argumentCaptor() val arg2 = argumentCaptor() @@ -115,13 +111,13 @@ class ProfileResourceTest { .queryParam("referred_by", referredBy) .request(MediaType.APPLICATION_JSON) .header("Authorization", "Bearer ${AccessToken.withEmail(email)}") - .post(Entity.json("{\n" + - " \"name\": \"" + name + "\",\n" + - " \"address\": \"" + address + "\",\n" + - " \"postCode\": \"" + postCode + "\",\n" + - " \"city\": \"" + city + "\",\n" + - " \"email\": \"" + email + "\"\n" + - "}\n")) + .post(Entity.json("""{ + "name": "$name", + "address": "$address", + "postCode": "$postCode", + "city": "$city", + "email": "$email" + }""".trimIndent())) assertThat(resp.status).isEqualTo(Response.Status.CREATED.statusCode) assertThat(resp.mediaType.toString()).isEqualTo(MediaType.APPLICATION_JSON) @@ -135,7 +131,6 @@ class ProfileResourceTest { } @Test - @Throws(Exception::class) fun updateProfile() { val arg1 = argumentCaptor() val arg2 = argumentCaptor() @@ -149,13 +144,13 @@ class ProfileResourceTest { val resp = RULE.target("/profile") .request(MediaType.APPLICATION_JSON) .header("Authorization", "Bearer ${AccessToken.withEmail(email)}") - .put(Entity.json("{\n" + - " \"name\": \"" + name + "\",\n" + - " \"address\": \"" + newAddress + "\",\n" + - " \"postCode\": \"" + newPostCode + "\",\n" + - " \"city\": \"" + city + "\",\n" + - " \"email\": \"" + email + "\"\n" + - "}\n")) + .put(Entity.json("""{ + 
"name": "$name", + "address": "$newAddress", + "postCode": "$newPostCode", + "city": "$city", + "email": "$email" + }""".trimIndent())) assertThat(resp.status).isEqualTo(Response.Status.OK.statusCode) assertThat(resp.mediaType.toString()).isEqualTo(MediaType.APPLICATION_JSON) @@ -168,14 +163,11 @@ class ProfileResourceTest { } @Test - @Throws(Exception::class) fun updateWithIncompleteProfile() { val resp = RULE.target("/profile") .request(MediaType.APPLICATION_JSON) .header("Authorization", "Bearer ${AccessToken.withEmail(email)}") - .put(Entity.json("{\n" + - " \"name\": \"" + name + "\"\n" + - "}\n")) + .put(Entity.json("""{ "name": "$name" }""")) assertThat(resp.status).isEqualTo(Response.Status.BAD_REQUEST.statusCode) } diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/PurchasesResourceTest.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/PurchasesResourceTest.kt index f07fdc6de..d16050fe9 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/PurchasesResourceTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/PurchasesResourceTest.kt @@ -19,7 +19,7 @@ import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.auth.OAuthAuthenticator import org.ostelco.prime.client.api.store.SubscriberDAO import org.ostelco.prime.client.api.util.AccessToken -import org.ostelco.prime.core.ApiError +import org.ostelco.prime.apierror.ApiError import org.ostelco.prime.model.Price import org.ostelco.prime.model.Product import org.ostelco.prime.model.PurchaseRecord @@ -35,14 +35,13 @@ class PurchasesResourceTest { private val email = "mw@internet.org" private val userInfo = Base64.getEncoder().encodeToString( - """|{ - | "issuer": "someone", - | "email": "mw@internet.org" - |}""".trimMargin() + """{ + "issuer": "someone", + "email": "mw@internet.org" + }""".trimIndent() .toByteArray()) @Before - @Throws(Exception::class) fun setUp() { Mockito.`when`(AUTHENTICATOR.authenticate(ArgumentMatchers.anyString())) .thenReturn(Optional.of(AccessTokenPrincipal(email))) diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionsResourceTest.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionsResourceTest.kt index 1ba221fcb..2abde50a8 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionsResourceTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionsResourceTest.kt @@ -20,7 +20,7 @@ import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.auth.OAuthAuthenticator import org.ostelco.prime.client.api.store.SubscriberDAO import org.ostelco.prime.client.api.util.AccessToken -import org.ostelco.prime.core.ApiError +import org.ostelco.prime.apierror.ApiError import org.ostelco.prime.model.Subscription import java.util.* import javax.ws.rs.client.Invocation @@ -38,14 +38,12 @@ class SubscriptionsResourceTest { private val subscription = Subscription(MSISDN) @Before - @Throws(Exception::class) fun setUp() { `when`(AUTHENTICATOR.authenticate(ArgumentMatchers.anyString())) .thenReturn(Optional.of(AccessTokenPrincipal(email))) } @Test - @Throws(Exception::class) fun getSubscriptions() { val arg = argumentCaptor() diff --git a/dataflow-pipelines/Dockerfile b/dataflow-pipelines/Dockerfile index 6c8a80698..4827fd238 100644 --- a/dataflow-pipelines/Dockerfile +++ b/dataflow-pipelines/Dockerfile @@ -1,4 +1,4 @@ -FROM 
openjdk:8u171 +FROM azul/zulu-openjdk:8u181-8.31.0.1 MAINTAINER CSI "csi@telenordigital.com" diff --git a/dataflow-pipelines/README.md b/dataflow-pipelines/README.md index 87aa7b4e8..06b484ac9 100644 --- a/dataflow-pipelines/README.md +++ b/dataflow-pipelines/README.md @@ -1,4 +1,4 @@ -# Analytics +# Module Dataflow pipelines ## Setup diff --git a/dataflow-pipelines/build.gradle b/dataflow-pipelines/build.gradle index 59461ef65..5bb20659a 100644 --- a/dataflow-pipelines/build.gradle +++ b/dataflow-pipelines/build.gradle @@ -1,5 +1,5 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "application" id "com.github.johnrengelman.shadow" version "2.0.4" id "idea" diff --git a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/DataConsumptionPipelineDefinition.kt b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/DataConsumptionPipelineDefinition.kt index 0eaa0cc39..2963bf8af 100644 --- a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/DataConsumptionPipelineDefinition.kt +++ b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/DataConsumptionPipelineDefinition.kt @@ -86,6 +86,8 @@ val consumptionPerMsisdn = object : PTransform, PCo .setMsisdn(it.msisdn) .setTimestamp(Timestamps.fromSeconds(hoursSinceEpoch * 3600)) .setDataBytes(0) + .setApn(it.apn) + .setMccMnc(it.mccMnc) .build(), it.bucketBytes) } @@ -97,6 +99,8 @@ val consumptionPerMsisdn = object : PTransform, PCo .setMsisdn(it.key?.msisdn) .setTimestamp(it.key?.timestamp) .setDataBytes(it.value) + .setApn(it.key?.apn) + .setMccMnc(it.key?.mccMnc) .build() } diff --git a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/io/BigQuery.kt b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/io/BigQuery.kt index 6316583e7..79e629e27 100644 --- a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/io/BigQuery.kt +++ b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/io/BigQuery.kt @@ -53,6 +53,8 @@ private object TableSchemas { fields.add(TableFieldSchema().setName("bucketBytes").setType("INTEGER")) fields.add(TableFieldSchema().setName("bundleBytes").setType("INTEGER")) fields.add(TableFieldSchema().setName("timestamp").setType("TIMESTAMP")) + fields.add(TableFieldSchema().setName("apn").setType("STRING")) + fields.add(TableFieldSchema().setName("mccMnc").setType("STRING")) TableSchema().setFields(fields) } HOURLY_CONSUMPTION, DAILY_CONSUMPTION -> { @@ -60,6 +62,8 @@ private object TableSchemas { fields.add(TableFieldSchema().setName("msisdn").setType("STRING")) fields.add(TableFieldSchema().setName("bytes").setType("INTEGER")) fields.add(TableFieldSchema().setName("timestamp").setType("TIMESTAMP")) + fields.add(TableFieldSchema().setName("apn").setType("STRING")) + fields.add(TableFieldSchema().setName("mccMnc").setType("STRING")) TableSchema().setFields(fields) } } @@ -75,6 +79,8 @@ val convertToRawTableRows = ParDoFn.transform { .set("bucketBytes", it.bucketBytes) .set("bundleBytes", it.bundleBytes) .set("timestamp", protobufTimestampToZonedDateTime(it.timestamp)) + .set("apn", it.apn) + .set("mccMnc", it.mccMnc) } val convertToHourlyTableRows = ParDoFn.transform { @@ -82,6 +88,8 @@ val convertToHourlyTableRows = ParDoFn.transform = TestStream.create(ProtoCoder.of(DataTrafficInfo::class.java)) .addElements( - DataTrafficInfo.newBuilder() - .setMsisdn("123") - .setBucketBytes(100) - .setBundleBytes(900) - 
.setTimestamp(Timestamps.fromMillis(Instant.now().millis)) - .build(), - DataTrafficInfo.newBuilder() - .setMsisdn("123") - .setBucketBytes(100) - .setBundleBytes(800) - .setTimestamp(Timestamps.fromMillis(Instant.now().millis)) - .build(), - DataTrafficInfo.newBuilder() - .setMsisdn("123") - .setBucketBytes(100) - .setBundleBytes(700) - .setTimestamp(Timestamps.fromMillis(Instant.now().millis)) - .build(), - DataTrafficInfo.newBuilder() - .setMsisdn("456") - .setBucketBytes(100) - .setBundleBytes(900) - .setTimestamp(Timestamps.fromMillis(Instant.now().millis)) - .build(), - DataTrafficInfo.newBuilder() - .setMsisdn("456") - .setBucketBytes(100) - .setBundleBytes(800) - .setTimestamp(Timestamps.fromMillis(Instant.now().millis)) - .build(), - DataTrafficInfo.newBuilder() - .setMsisdn("789") - .setBucketBytes(100) - .setBundleBytes(900) - .setTimestamp(Timestamps.fromMillis(Instant.now().millis)) - .build()) + createDataTrafficInfo(msisdn = "123", bucketBytes = 100, bundleBytes = 900, apn = "ostelco", mccMnc = "242_02"), + createDataTrafficInfo(msisdn = "123", bucketBytes = 100, bundleBytes = 800, apn = "ostelco", mccMnc = "242_02"), + createDataTrafficInfo(msisdn = "123", bucketBytes = 100, bundleBytes = 700, apn = "ostelco", mccMnc = "242_02"), + createDataTrafficInfo(msisdn = "123", bucketBytes = 100, bundleBytes = 600, apn = "pi", mccMnc = "242_02"), + createDataTrafficInfo(msisdn = "456", bucketBytes = 100, bundleBytes = 900, apn = "ostelco", mccMnc = "242_02"), + createDataTrafficInfo(msisdn = "456", bucketBytes = 100, bundleBytes = 800, apn = "ostelco", mccMnc = "242_02"), + createDataTrafficInfo(msisdn = "789", bucketBytes = 100, bundleBytes = 900, apn = "ostelco", mccMnc = "242_02")) .advanceWatermarkToInfinity() @@ -78,13 +49,45 @@ class ConsumptionPerMsisdnTest { .setCoder(ProtoCoder.of(AggregatedDataTrafficInfo::class.java)) PAssert.that(out).containsInAnyOrder( - AggregatedDataTrafficInfo.newBuilder().setMsisdn("123").setDataBytes(300).setTimestamp(currentHourDateTime).build(), - AggregatedDataTrafficInfo.newBuilder().setMsisdn("456").setDataBytes(200).setTimestamp(currentHourDateTime).build(), - AggregatedDataTrafficInfo.newBuilder().setMsisdn("789").setDataBytes(100).setTimestamp(currentHourDateTime).build()) + createAggregatedDataTrafficInfo(msisdn = "123", dataBytes = 300, timestamp = currentHourDateTime, apn = "ostelco", mccMnc = "242_02"), + createAggregatedDataTrafficInfo(msisdn = "123", dataBytes = 100, timestamp = currentHourDateTime, apn = "pi", mccMnc = "242_02"), + createAggregatedDataTrafficInfo(msisdn = "456", dataBytes = 200, timestamp = currentHourDateTime, apn = "ostelco", mccMnc = "242_02"), + createAggregatedDataTrafficInfo(msisdn = "789", dataBytes = 100, timestamp = currentHourDateTime, apn = "ostelco", mccMnc = "242_02")) pipeline.run().waitUntilFinish() } } private fun getCurrentHourDateTime(): Timestamp = Timestamps.fromSeconds((java.time.Instant.now().epochSecond / 3600) * 3600) + + private fun createDataTrafficInfo( + msisdn: String, + bucketBytes: Long, + bundleBytes: Long, + apn: String, + mccMnc: String): DataTrafficInfo = + + DataTrafficInfo.newBuilder() + .setMsisdn(msisdn) + .setBucketBytes(bucketBytes) + .setBundleBytes(bundleBytes) + .setTimestamp(Timestamps.fromMillis(Instant.now().millis)) + .setApn(apn) + .setMccMnc(mccMnc) + .build() + + private fun createAggregatedDataTrafficInfo( + msisdn: String, + dataBytes: Long, + timestamp: Timestamp, + apn: String, + mccMnc: String): AggregatedDataTrafficInfo = + + 
AggregatedDataTrafficInfo.newBuilder() + .setMsisdn(msisdn) + .setDataBytes(dataBytes) + .setTimestamp(timestamp) + .setApn(apn) + .setMccMnc(mccMnc) + .build() } \ No newline at end of file diff --git a/diameter-stack/README.md b/diameter-stack/README.md index 64f1c82b1..7944e0681 100644 --- a/diameter-stack/README.md +++ b/diameter-stack/README.md @@ -1 +1 @@ -Diameter library \ No newline at end of file +# Module Diameter Stack \ No newline at end of file diff --git a/diameter-stack/build.gradle b/diameter-stack/build.gradle index 4651ff9b9..9060d9b76 100644 --- a/diameter-stack/build.gradle +++ b/diameter-stack/build.gradle @@ -1,5 +1,5 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "java-library" id "signing" id "maven" diff --git a/diameter-stack/src/main/kotlin/org/ostelco/diameter/builder/Builder.kt b/diameter-stack/src/main/kotlin/org/ostelco/diameter/builder/Builder.kt index eb0ce98c5..330e09610 100644 --- a/diameter-stack/src/main/kotlin/org/ostelco/diameter/builder/Builder.kt +++ b/diameter-stack/src/main/kotlin/org/ostelco/diameter/builder/Builder.kt @@ -6,7 +6,7 @@ import java.net.InetAddress import java.util.* /** - * DSL style helper class to populate values into {@link org.jdiameter.api.AvpSet} + * DSL style helper class to populate values into [org.jdiameter.api.AvpSet] */ fun set(avpSet: AvpSet, init: AvpSetContext.() -> Unit) { val avpSetContext = AvpSetContext(avpSet) diff --git a/diameter-stack/src/main/kotlin/org/ostelco/diameter/model/CreditControlRequest.kt b/diameter-stack/src/main/kotlin/org/ostelco/diameter/model/CreditControlRequest.kt index 5e5f4ba16..cb2f1cc5e 100644 --- a/diameter-stack/src/main/kotlin/org/ostelco/diameter/model/CreditControlRequest.kt +++ b/diameter-stack/src/main/kotlin/org/ostelco/diameter/model/CreditControlRequest.kt @@ -7,7 +7,7 @@ import org.ostelco.diameter.model.SubscriptionType.END_USER_IMSI import org.ostelco.diameter.parser.AvpField import org.ostelco.diameter.parser.AvpList -class CreditControlRequest() { +class CreditControlRequest { @AvpList(Avp.MULTIPLE_SERVICES_CREDIT_CONTROL, MultipleServiceCreditControl::class) var multipleServiceCreditControls: List = emptyList() diff --git a/diameter-stack/src/main/kotlin/org/ostelco/diameter/model/Model.kt b/diameter-stack/src/main/kotlin/org/ostelco/diameter/model/Model.kt index e223e8695..5469f6185 100644 --- a/diameter-stack/src/main/kotlin/org/ostelco/diameter/model/Model.kt +++ b/diameter-stack/src/main/kotlin/org/ostelco/diameter/model/Model.kt @@ -159,7 +159,7 @@ data class RedirectServer( * Service-Information AVP ( 873 ) * http://www.3gpp.org/ftp/Specs/html-info/32299.htm */ -class ServiceInformation() { +class ServiceInformation { @AvpList(Avp.PS_INFORMATION, PsInformation::class) var psInformation: List = emptyList() @@ -179,7 +179,7 @@ enum class SubscriptionType { /** * https://tools.ietf.org/html/rfc4006#section-8.46 */ -class SubscriptionId() { +class SubscriptionId { @AvpField(Avp.SUBSCRIPTION_ID_TYPE) var idType: SubscriptionType? = null @@ -191,7 +191,7 @@ class SubscriptionId() { /** * https://tools.ietf.org/html/rfc4006#page-78 */ -class UserEquipmentInfo() { +class UserEquipmentInfo { @AvpField(Avp.USER_EQUIPMENT_INFO_TYPE) var userEquipmentInfoType: UserEquipmentInfoType? 
= null diff --git a/diameter-stack/src/main/kotlin/org/ostelco/diameter/model/PsInformation.kt b/diameter-stack/src/main/kotlin/org/ostelco/diameter/model/PsInformation.kt index 065a0d4d3..eef25045b 100644 --- a/diameter-stack/src/main/kotlin/org/ostelco/diameter/model/PsInformation.kt +++ b/diameter-stack/src/main/kotlin/org/ostelco/diameter/model/PsInformation.kt @@ -10,7 +10,7 @@ import java.net.InetAddress * * TODO martin: Add 3GPP-GPRS-Negotiated-QoS-Profile ( Avp 5 ) */ -class PsInformation() { +class PsInformation { // 3GPP-Charging-Id (Avp 2) @AvpField(Avp.TGPP_CHARGING_ID) @@ -58,7 +58,7 @@ class PsInformation() { // 3GPP-SGSN-MCC-MNC ( Avp 18) @AvpField(Avp.GPP_SGSN_MCC_MNC) - var sgsnMncMcc: String? = null + var sgsnMccMnc: String? = null // 3GPP-MS-TimeZone ( Avp 23 ) @AvpField(Avp.TGPP_MS_TIMEZONE) diff --git a/diameter-stack/src/test/kotlin/org/ostelco/diameter/parser/AvpParserTest.kt b/diameter-stack/src/test/kotlin/org/ostelco/diameter/parser/AvpParserTest.kt index 8d5b3a05b..acfe544ed 100644 --- a/diameter-stack/src/test/kotlin/org/ostelco/diameter/parser/AvpParserTest.kt +++ b/diameter-stack/src/test/kotlin/org/ostelco/diameter/parser/AvpParserTest.kt @@ -16,24 +16,24 @@ class AvpParserTest { @Test fun parsePsInformation() { val calledStationIdAvp = Mockito.mock(Avp::class.java) - val sgsnMncMccAvp = Mockito.mock(Avp::class.java) + val sgsnMccMncAvp = Mockito.mock(Avp::class.java) `when`(calledStationIdAvp.utF8String).thenReturn("panacea") `when`(calledStationIdAvp.code).thenReturn(30) `when`(calledStationIdAvp.vendorId).thenReturn(0) - `when`(sgsnMncMccAvp.utF8String).thenReturn("24201") - `when`(sgsnMncMccAvp.code).thenReturn(Avp.GPP_SGSN_MCC_MNC) - `when`(sgsnMncMccAvp.vendorId).thenReturn(VENDOR_ID_3GPP) + `when`(sgsnMccMncAvp.utF8String).thenReturn("24201") + `when`(sgsnMccMncAvp.code).thenReturn(Avp.GPP_SGSN_MCC_MNC) + `when`(sgsnMccMncAvp.vendorId).thenReturn(VENDOR_ID_3GPP) val set = Mockito.mock(AvpSet::class.java) `when`(set.getAvp(30)).thenReturn(calledStationIdAvp) - `when`(set.getAvp(Avp.GPP_SGSN_MCC_MNC)).thenReturn(sgsnMncMccAvp) + `when`(set.getAvp(Avp.GPP_SGSN_MCC_MNC)).thenReturn(sgsnMccMncAvp) val psInformation = AvpParser().parse(PsInformation::class, set) assertEquals("panacea", psInformation.calledStationId) - assertEquals("24201", psInformation.sgsnMncMcc) + assertEquals("24201", psInformation.sgsnMccMnc) } @Test diff --git a/diameter-test/README.md b/diameter-test/README.md index 6a9e8674a..0ed91fbfc 100644 --- a/diameter-test/README.md +++ b/diameter-test/README.md @@ -1 +1,2 @@ -DIAMETER test library. This provides a TestClient that can be used generate DIAMETER traffic in unit tests. \ No newline at end of file +# Module DIAMETER test library. +This provides a TestClient that can be used generate DIAMETER traffic in unit tests. 
\ No newline at end of file diff --git a/diameter-test/build.gradle b/diameter-test/build.gradle index 01daf992a..1e33e6c78 100644 --- a/diameter-test/build.gradle +++ b/diameter-test/build.gradle @@ -1,5 +1,5 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "java-library" id "signing" id "maven" diff --git a/diameter-test/src/main/kotlin/org/ostelco/diameter/test/TestHelper.kt b/diameter-test/src/main/kotlin/org/ostelco/diameter/test/TestHelper.kt index 8b0bb8dde..07d015f4e 100644 --- a/diameter-test/src/main/kotlin/org/ostelco/diameter/test/TestHelper.kt +++ b/diameter-test/src/main/kotlin/org/ostelco/diameter/test/TestHelper.kt @@ -107,14 +107,14 @@ object TestHelper { } } - private fun addServiceInformation(ccrAvps: AvpSet, apn: String, sgsnMncMcc: String) { + private fun addServiceInformation(ccrAvps: AvpSet, apn: String, sgsnMccMnc: String) { set(ccrAvps) { group(Avp.SERVICE_INFORMATION, vendorId = VENDOR_ID_3GPP) { group(PS_INFORMATION, vendorId = VENDOR_ID_3GPP) { avp(CALLED_STATION_ID, apn, pFlag = true) - avp(Avp.GPP_SGSN_MCC_MNC, sgsnMncMcc, vendorId = VENDOR_ID_3GPP, asOctetString = true) + avp(Avp.GPP_SGSN_MCC_MNC, sgsnMccMnc, vendorId = VENDOR_ID_3GPP, asOctetString = true) } } } @@ -127,7 +127,7 @@ object TestHelper { buildBasicRequest(ccrAvps, RequestType.INITIAL_REQUEST, requestNumber = 0) addUser(ccrAvps, msisdn = msisdn, imsi = IMSI) addBucketRequest(ccrAvps, ratingGroup = 10, serviceIdentifier = 1, bucketSize = bucketSize) - addServiceInformation(ccrAvps, apn = APN, sgsnMncMcc = SGSN_MCC_MNC) + addServiceInformation(ccrAvps, apn = APN, sgsnMccMnc = SGSN_MCC_MNC) } @JvmStatic @@ -135,7 +135,7 @@ object TestHelper { buildBasicRequest(ccrAvps, RequestType.UPDATE_REQUEST, requestNumber = 1) addUser(ccrAvps, msisdn = msisdn, imsi = IMSI) addBucketRequest(ccrAvps, ratingGroup = 10, serviceIdentifier = 1, bucketSize = bucketSize, usedBucketSize = usedBucketSize) - addServiceInformation(ccrAvps, apn = APN, sgsnMncMcc = SGSN_MCC_MNC) + addServiceInformation(ccrAvps, apn = APN, sgsnMccMnc = SGSN_MCC_MNC) } @JvmStatic @@ -143,7 +143,7 @@ object TestHelper { buildBasicRequest(ccrAvps, RequestType.UPDATE_REQUEST, requestNumber = 1) addUser(ccrAvps, msisdn = msisdn, imsi = IMSI) addFinalBucketRequest(ccrAvps, ratingGroup = 10, serviceIdentifier = 1) - addServiceInformation(ccrAvps, apn = APN, sgsnMncMcc = SGSN_MCC_MNC) + addServiceInformation(ccrAvps, apn = APN, sgsnMccMnc = SGSN_MCC_MNC) } @JvmStatic @@ -151,13 +151,13 @@ object TestHelper { buildBasicRequest(ccrAvps, RequestType.TERMINATION_REQUEST, requestNumber = 2) addUser(ccrAvps, msisdn = msisdn, imsi = IMSI) addTerminateRequest(ccrAvps, ratingGroup = 10, serviceIdentifier = 1, bucketSize = bucketSize) - addServiceInformation(ccrAvps, apn = APN, sgsnMncMcc = SGSN_MCC_MNC) + addServiceInformation(ccrAvps, apn = APN, sgsnMccMnc = SGSN_MCC_MNC) } @JvmStatic fun createTerminateRequest(ccrAvps: AvpSet, msisdn: String) { buildBasicRequest(ccrAvps, RequestType.TERMINATION_REQUEST, requestNumber = 2) addUser(ccrAvps, msisdn = msisdn, imsi = IMSI) - addServiceInformation(ccrAvps, apn = APN, sgsnMncMcc = SGSN_MCC_MNC) + addServiceInformation(ccrAvps, apn = APN, sgsnMccMnc = SGSN_MCC_MNC) } } \ No newline at end of file diff --git a/docs/DEPLOY.md b/docs/DEPLOY.md index c1a791aa2..dd72d41ea 100644 --- a/docs/DEPLOY.md +++ b/docs/DEPLOY.md @@ -77,11 +77,11 @@ If cluster already exists, fetch authentication credentials for the Kubernetes c Build the Docker image (In the 
folder with Dockerfile) - docker build -t gcr.io/${PROJECT_ID}/: . + docker build -t eu.gcr.io/${PROJECT_ID}/: . Push to the registry - gcloud docker -- push gcr.io/${PROJECT_ID}/: + docker push eu.gcr.io/${PROJECT_ID}/: Apply the deployment diff --git a/docs/EXPERIMENTAL_AGENTS_LIFECYCLE.md b/docs/EXPERIMENTAL_AGENTS_LIFECYCLE.md new file mode 100644 index 000000000..551a384fa --- /dev/null +++ b/docs/EXPERIMENTAL_AGENTS_LIFECYCLE.md @@ -0,0 +1,278 @@ +Experimental agents +=== + +Agents and the context they work in +---- +This directory contains a set of experiments that should eventually +lead to a framework, a very simple one, for making analytic agents. + +Analytic agents are in this context defined as processes, either +fully automatic or partly manual, that take data about user +behavior and demographics, and translate this into offers +that can be presented to subscribers to be acted on. + + demographics+behavior -> agent -> offer to subscriber. + +It is important to note that even though it does make sense +for agents to run only once, it makes even more sense if they +run more than once, meaning that future runs of agents will +be observing the results of previous actions. This +puts the agents in a position to be learning agents. + +Interaction with analytics systems +---- +There are already many analytics systems in this world, google +analytics, firebase analytics, kissmetrics, ... . Our analytics agent +subsystem should emphatically _not_ replicate much or anything +of what these existing systems do. Reports, ad-hoc queries, +dashboards, setting up funnels, tracking campaigns, etc.: they all do +this much better than we can hope for, so we should use them, not +copy them. What we _do_ want is to be able to work in concert with +them. We want to be able to pick up events that are generated on the +basis of offers generated by our analytics agents, and track them +using external analytics tools, and to use the same terms as the +tools we are working in concert with. This means that we need +to solve a coordination problem with respect to whatever analytics +system we choose to use. Our current working hypothesis is that we +will be using Firebase Analytics. + + +Current overall design +--- + + +### Consumption cycle + +The current overall design is what we think today should be the +architecture to aim for. This is a moving target; tomorrow +it could be something else. With that caveat in mind, here goes: + +We set up a processing cycle with these steps and formats. + +* Continuously collect data using whatever means necessary, and dump + them into bigquery. Data is stored pseudoanonymized. The + bigquery dumps should contain at least: Data consumption data and + behavior data from analytics. + +Periodically, every hour or so, run a kubernetes cron job that does this: + +* Run predefined queries on the data stored in bigquery, + extract tables for demographics, etc., that make sense + for the analytics agents to work with. Not too much, + not too little, subject to change, and the models must + allow for this (e.g. require that columns must be + allowed to appear without consumers breaking). + Put the results into temporary bigquery tables. + +* Translate from pseudoanonymized data into consistent + pseudoanonymized datasets. + +* Dump the translated & consistent dataset into one or more cloud + storage buckets, for consumption by agents.
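As a rough illustration of one iteration of the hourly cron job described above (the dataset, table, and bucket names below are invented placeholders, not the project's real ones), the flow could boil down to a `bq query` into a temporary table followed by a `bq extract` into a bucket, mirroring the pattern used by `exporter/script/export_data.sh` later in this changeset:

```bash
#!/usr/bin/env bash
# Sketch only: project, dataset, table and bucket names are illustrative placeholders.
set -e

projectId=pantel-2decb
snapshotTable=agent_input.hourly_consumption_snapshot             # temporary bigquery table
csvfile=${projectId}-agent-input-export/$(date -u +%Y%m%d%H).csv  # cloud storage target

# 1. Run a predefined query and materialize the result into a temporary table.
bq --location=EU query --destination_table ${snapshotTable} --replace --use_legacy_sql=false \
  "SELECT msisdn, bytes, timestamp FROM \`${projectId}.data_consumption.hourly_consumption\`"

# 2. Dump the table into a cloud storage bucket for agents to consume.
bq --location=EU extract --destination_format=CSV ${snapshotTable} gs://${csvfile}
```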
+ +Agents can then either + +* Read the data directly from the bucket programmatically +* Read the data from the bucket via the web interface, and then + process it from local file storage. +* Use a FUSE filesystem to read data from the bucket, but present it + as a local file system. + + * Agents will then take the input, process it, produce + instructions for offers to be made. Do this in the form of + yaml files (or perhaps a single yaml file, see sample + format in this directory). The output is written + back into a cloud storage bucket. + +The output from the agent is then picked up by a job +listening for input into buckets (either by running +periodically, or by actively listening for changes). +The offers are then translated into internal format +offers, written into the appropriate databases, +possibly signalled through messaging services and + immediately picked up by the subscribers. + +This completes the cycle. + +### Components + +A kubernetes program running either continuously, or as a kubernetes +batch job, that exports and imports datasets via google cloud storage. +Written in Kotlin, running as a dropwizard job. + +The component should use a pubsub channel to listen for +actual changes in the bucket. The reason for favoring pubsub is that +it is possible to run tests of the component on a workstation, making +it unnecessary to deploy the component to a production-like environment: +the component can be tested in an environment that is _almost_ production-like but +runs on a workstation. + +### Example yaml file describing an offer (work in progress) + +This version is based on the data we used in a previous version of the +"prime" component's internal schema, to which are added some fields +that are assumed to make sense when describing offers. This proposal +has not been made as minimal as possible, and it has not been +criticized to death (a.k.a. "code reviewed" :) ) to ensure that it is +the best possible format we can make today. That should happen before +we commit to using it, or write even a single line of code to parse or +produce it! + + + # + # This is a sample YAML format to be used by + # agents that produce offers. The general idea + # is that an offer has a set of parameters, + # and also a set of selected subscribers that will + # get it. + # + # YAML was chosen since it's more human readable than + # e.g. json or protobuffers, while still being + # easy to produce by an agent, and relatively compact, + # in particular when gzipped. + # + + producing-agent: + name: Simple agent + version: 1.0 + + # All of the parameters below are just copied from the firebase + # realtime database we used in the demo, converted to + # camel case. All the fields should be documented + # in this document, and we should think through whether this is + # the best set of parameters we want. + + offer: + history: + createdAt: "2018-02-22T12:41:49.871Z" + updatedAt: "2018-02-22T12:41:49.871Z" + visibleFrom: "2018-02-22T12:41:49.871Z" + expiresOn: "2018-02-22T12:41:49.871Z" + presentation: + badgeLabel: "mbop" + description: "Best offer you will get today" + shortDescription: "Best offer!" + label: "3 GB" + name: "3 GB" + priceLabel: "49 NOK" + hidden: false + image: https://www.ft-associates.com/wp-content/uploads/2015/08/Best-Offer.jpg + financial: + repurchability: 1 + currencyLabel: "NOK" + price: 4900 + taxRate: 10 + product: + SKU: 2 + # A possibly very long list of product parameters that are all + # dependent on the SKU's requirement. Details ignored here, + # that may pop up later.
Deal with them then. + noOfBytes: 3000000000 + + # We put the segment last, since it may have a long list of + # members in it. We want that list to be last, since it contains + # little information that humans are interested in, and we want + # humans to start reading the file at the top. + + segment: + type: agent-specific-segment + members: + # The decryption key is what the de-anonymizer will use to + # make proper identifiers out of the members listed below. + # The special purpose key "none" indicates that the member list + # is in clear text. + decryptionKey: none + members: + - 4790300157 + - 4790300144 + - 4333333333 + + + + +Notes +--- + +### Some questions about the bigquery structure: + +* We should use timestamps I think, since they are simpler to work with. + +* We should also consider using ingestion-time partitioned tables + + https://cloud.google.com/bigquery/docs/creating-partitioned-tables + +* Most of our time-dependent queries will overlap reasonably + sized shards (days, weeks, months), so that we don't have to + table-scan _all_ of the potentially very large datasets we will + accumulate. + +* Don't name any field 'timestamp'. It leads to endless confusion + with the datatype named 'timestamp', and the function called + 'timestamp'. + +### Formatting guidelines + +* We should decide if we will use camel case or snake case in + offer descriptions. The current firebase/firestore structure uses + both, and that is confusing. + +### Accessing data via FUSE + +* To get a FUSE-mounted cloud storage bucket on a Mac, do this: + https://cloud.google.com/storage/docs/gcs-fuse + +* Download from + https://github.com/GoogleCloudPlatform/gcsfuse/ + https://github.com/GoogleCloudPlatform/gcsfuse/blob/master/docs/installing.md + +* Install this way + + brew install gcsfuse + sudo ln -s /usr/local/sbin/mount_gcsfuse /sbin # For mount(8) support + +* Create a certificate to make fuse authenticate correctly +Make a certificate at https://console.cloud.google.com/apis/credentials +To authenticate + + export GOOGLE_APPLICATION_CREDENTIALS=/some/path/pantel-credentials.json + +then + + gcsfuse rmz-test-bucket /tmp/aaas + + + +## Installing the google cloud command line interface + +https://cloud.google.com/sdk/docs/ + +To authenticate + + gcloud auth login + +(will use the browser to do actual authentication) + + +Set your project: + + gcloud config set project pantel + +.. or some other project + +Run a script to get some data, e.g. + + + bq head -n 10 pantel-2decb:data_consumption.hourly_consumption + +to get ten lines of consumption data displayed. + + +TODO +=== + +* Discuss with Cecilie etc. what's really necessary. +* Write a kotlin/kubernetes program that will extract data from + misc. sources and present it for agent consumption/read data from + agents. +* Declare initial victory, but be prepared for massive rewrites when + actual experience from using these tools becomes available diff --git a/docs/TEST.md b/docs/TEST.md index b24b9098f..aa448c9e0 100644 --- a/docs/TEST.md +++ b/docs/TEST.md @@ -11,23 +11,27 @@ grep -i pantel $(find . -name '.gitignore') | awk -F: '{print $1}' | sort | uniq | sed 's/.gitignore//g' ``` - * Create test subscriber with default balance by importing `docs/pantel-2decb_test.json` at `/test` path in Firebase. - - * Create self-signed certificate for nginx with domain as `ocs.ostelco.org` and place them at following location: - * In `esp`, keep `nginx.key` and `nginx.cert`. - * In `ocsgw/config`, keep `nginx.cert`.
+ * Create self-signed certificate for nginx with domain as `ocs.dev.ostelco.org` and place them at following location: + * In `certs/ocs.dev.ostelco.org`, keep `nginx.key` and `nginx.cert`. + * In `ocsgw/config`, keep `ocs.cert`. ```bash cd certs/ocs.dev.ostelco.org openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout ./nginx.key -out ./nginx.crt -subj '/CN=ocs.dev.ostelco.org' cp nginx.crt ../../ocsgw/config/ocs.crt ``` + * Create self-signed certificate for nginx with domain as `metrics.dev.ostelco.org` and place them at following location: + * In `certs/metrics.dev.ostelco.org`, keep `nginx.key` and `nginx.cert`. + * In `ocsgw/config`, keep `metrics.cert`. ```bash cd certs/metrics.dev.ostelco.org openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout ./nginx.key -out ./nginx.crt -subj '/CN=metrics.dev.ostelco.org' cp nginx.crt ../../ocsgw/config/metrics.crt ``` + + * Set Stripe API key as env variable - `STRIPE_API_KEY` + ### Test acceptance-tests @@ -35,27 +39,3 @@ cp nginx.crt ../../ocsgw/config/metrics.crt gradle clean build docker-compose up --build --abort-on-container-exit ``` - - * Integration tests for Prime - -```bash -gradle prime:integration -``` - -## Configuring emulators - - * Testing with Google Cloud PubSub Emulator - - * Install `gcloud` cli tool & basic components. - * Install `pubsub beta` emulator using `gcloud`. - * Init emulator and set ENV variable. - * Start emulator. - -```bash -gcloud components list -gcloud components install pubsub-emulator -gcloud beta emulators pubsub env-init -gcloud beta emulators pubsub start -``` - - diff --git a/docs/TODO.md b/docs/TODO.md new file mode 100644 index 000000000..68de918a5 --- /dev/null +++ b/docs/TODO.md @@ -0,0 +1,24 @@ +# TODO + + +* Start using something other than this file to track tasks. +* Get rid of the interface/impl pattern that is present in + the entities package. Since we simplified the data classes + with lombok, that separation gives little or no benefit. +* The interactions between the various types of messages are + confusing. Consider autogenerating sequence diagrams when + running tests to help document what is going on. +* Increase unit testability, restructure to make almost everything + unit testable. +* Refactor firebase database into something that is integration testable. +* Make a template project for dropwizard. +* Automatically generate javadoc in Travis build. +* Automatically publish javadoc to the github website. + https://github.com/blog/2233-publish-your-project-documentation-with-github-pages +* Look into making a healthcheck for firebase/firestore + - https://www.firebase.com/docs/web/guide/offline-capabilities.html#section-connection-state + this.firebaseDatabase.getReference("/.info/connected").addValueEventListener() + +* This looks like a good writeup of best (&worst) practices for testing + http://blog.codepipes.com/testing/software-testing-antipatterns.html We should + absorb this and adapt and institutionalise the practices we want to use. 
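One of the TODO items above suggests a healthcheck built on Firebase's special `/.info/connected` path. A minimal, untested sketch of what such a listener could look like with the Firebase Admin SDK (the function name and wiring are illustrative, not existing code in this repository):

```kotlin
import com.google.firebase.database.DataSnapshot
import com.google.firebase.database.DatabaseError
import com.google.firebase.database.FirebaseDatabase
import com.google.firebase.database.ValueEventListener

// Illustrative sketch: watch the built-in /.info/connected path and report
// connectivity changes to the given callback. Not part of the current codebase.
fun watchFirebaseConnection(database: FirebaseDatabase, onChange: (Boolean) -> Unit) {
    database.getReference("/.info/connected")
            .addValueEventListener(object : ValueEventListener {

                override fun onDataChange(snapshot: DataSnapshot) {
                    // The value at /.info/connected is a plain boolean.
                    onChange(snapshot.value as? Boolean ?: false)
                }

                override fun onCancelled(error: DatabaseError) {
                    onChange(false)
                }
            })
}
```

A dropwizard healthcheck could then simply report the last value this listener saw.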
\ No newline at end of file diff --git a/exporter/Dockerfile b/exporter/Dockerfile index 9756d72b6..9e003a63c 100644 --- a/exporter/Dockerfile +++ b/exporter/Dockerfile @@ -1,16 +1,18 @@ -FROM ubuntu:17.10 +FROM ubuntu:18.04 MAINTAINER CSI "csi@telenordigital.com" RUN apt-get update && apt-get install -y --no-install-recommends \ - curl=7.55.1-1ubuntu2.4 \ - uuid-runtime=2.30.1-0ubuntu4.1 \ - lsb-release=9.20160110ubuntu5 \ - ca-certificates=20170717 \ + apt-utils \ + curl \ + uuid-runtime \ + lsb-release \ + ca-certificates \ + gnupg2 \ && echo "deb http://packages.cloud.google.com/apt cloud-sdk-artful main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list \ && curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - \ && apt-get update && apt-get install -y --no-install-recommends \ - google-cloud-sdk=198.0.0-0 \ + google-cloud-sdk \ && rm -rf /var/lib/apt/lists/* COPY script/idle.sh /idle.sh diff --git a/exporter/README.md b/exporter/README.md index cd439079f..cd8e306bc 100644 --- a/exporter/README.md +++ b/exporter/README.md @@ -16,23 +16,11 @@ Tables created: How to deploy/use this in kubernetes cluster ``` -#PROJECT_ID=pantel-2decb -export PROJECT_ID="$(gcloud config get-value project -q)" +# To deploy in prod +exporter/deploy/deploy.sh -# Create cluster -gcloud container clusters create private-cluster --scopes=default,bigquery,datastore,pubsub,sql,storage-rw --num-nodes=3 - -# Get authentication credentials for the cluster -gcloud container clusters get-credentials private-cluster - -# Build the Docker image (In the folder with Dockerfile) -docker build -t gcr.io/${PROJECT_ID}/exporter:v1 . - -# Push to the registry -gcloud docker -- push gcr.io/${PROJECT_ID}/exporter:v1 - -# Apply the deployment -kubectl apply -f ./exporter.yaml +# To deploy in dev +exporter/deploy/deploy-dev.sh # Details of the deployment kubectl describe deployment exporter diff --git a/exporter/deploy/deploy-dev.sh b/exporter/deploy/deploy-dev.sh new file mode 100755 index 000000000..a1f4a5c1d --- /dev/null +++ b/exporter/deploy/deploy-dev.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +set -e + +if [ ! -f exporter/deploy/deploy-dev.sh ]; then + (>&2 echo "Run this script from project root dir (ostelco-core)") + exit 1 +fi + +kubectl config use-context $(kubectl config get-contexts --output name | grep dev-cluster) + +PROJECT_ID="$(gcloud config get-value project -q)" +SHORT_SHA="$(git log -1 --pretty=format:%h)" +TAG="v${SHORT_SHA}-dev" + +echo PROJECT_ID=${PROJECT_ID} +echo SHORT_SHA=${SHORT_SHA} +echo TAG=${TAG} + +docker build -t eu.gcr.io/${PROJECT_ID}/exporter:${TAG} exporter +docker push eu.gcr.io/${PROJECT_ID}/exporter:${TAG} + +echo "Deploying exporter to GKE" + +sed -e s/EXPORTER_VERSION/${TAG}/g exporter/exporter.yaml | kubectl apply -f - \ No newline at end of file diff --git a/exporter/deploy/deploy.sh b/exporter/deploy/deploy.sh new file mode 100755 index 000000000..19a989c05 --- /dev/null +++ b/exporter/deploy/deploy.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +set -e + +if [ ! 
-f exporter/deploy/deploy-dev.sh ]; then + (>&2 echo "Run this script from project root dir (ostelco-core)") + exit 1 +fi + +kubectl config use-context $(kubectl config get-contexts --output name | grep private-cluster) + +PROJECT_ID="$(gcloud config get-value project -q)" +SHORT_SHA="$(git log -1 --pretty=format:%h)" +TAG="v${SHORT_SHA}" + +echo PROJECT_ID=${PROJECT_ID} +echo SHORT_SHA=${SHORT_SHA} +echo TAG=${TAG} + +docker build -t eu.gcr.io/${PROJECT_ID}/exporter:${TAG} exporter +docker push eu.gcr.io/${PROJECT_ID}/exporter:${TAG} + +echo "Deploying exporter to GKE" + +sed -e s/EXPORTER_VERSION/${TAG}/g exporter/exporter.yaml | kubectl apply -f - \ No newline at end of file diff --git a/exporter/exporter.yaml b/exporter/exporter.yaml index 3a7e137a4..c174ac9d5 100644 --- a/exporter/exporter.yaml +++ b/exporter/exporter.yaml @@ -15,6 +15,7 @@ spec: spec: containers: - name: exporter - image: gcr.io/pantel-2decb/exporter:v2.1 + image: eu.gcr.io/pantel-2decb/exporter:EXPORTER_VERSION + imagePullPolicy: Always ports: - containerPort: 8080 diff --git a/exporter/script/delete_export_data.sh b/exporter/script/delete_export_data.sh index 271b09f1a..2d59e7e2b 100644 --- a/exporter/script/delete_export_data.sh +++ b/exporter/script/delete_export_data.sh @@ -10,16 +10,38 @@ exportId=${exportId//-} exportId=${exportId,,} projectId=pantel-2decb -pseudonymsTable=exported_pseudonyms.$exportId +msisdnPseudonymsTable=exported_pseudonyms.${exportId}_msisdn +subscriberPseudonymsTable=exported_pseudonyms.${exportId}_subscriber +sub2msisdnMappingsTable=exported_data_consumption.${exportId}_sub2msisdn dataConsumptionTable=exported_data_consumption.$exportId +purchaseRecordsTable=exported_data_consumption.${exportId}_purchases csvfile=$projectId-dataconsumption-export/$exportId.csv +purchasesCsvfile=$projectId-dataconsumption-export/$exportId-purchases.csv +sub2msisdnCsvfile=$projectId-dataconsumption-export/$exportId-sub2msisdn.csv echo "Cleaning all data for export $exportId" -echo "Deleting Table $pseudonymsTable" -bq rm -f -t $pseudonymsTable +echo "Deleting Table $msisdnPseudonymsTable" +bq rm -f -t $msisdnPseudonymsTable + +echo "Deleting Table $subscriberPseudonymsTable" +bq rm -f -t $subscriberPseudonymsTable + +echo "Deleting Table $sub2msisdnMappingsTable" +bq rm -f -t $sub2msisdnMappingsTable + echo "Deleting Table $dataConsumptionTable" bq rm -f -t $dataConsumptionTable + +echo "Deleting Table $purchaseRecordsTable" +bq rm -f -t $purchaseRecordsTable + echo "Deleting csv gs://$csvfile" gsutil rm gs://$csvfile +echo "Deleting csv gs://$purchasesCsvfile" +gsutil rm gs://$purchasesCsvfile + +echo "Deleting csv gs://$sub2msisdnCsvfile" +gsutil rm gs://$sub2msisdnCsvfile + echo "Finished cleanup for the export $exportId" \ No newline at end of file diff --git a/exporter/script/export_data.sh b/exporter/script/export_data.sh index 2595be512..a63afd2e2 100644 --- a/exporter/script/export_data.sh +++ b/exporter/script/export_data.sh @@ -9,10 +9,16 @@ exportId=${exportId//-} exportId=${exportId,,} projectId=pantel-2decb -pseudonymsTable=$projectId.exported_pseudonyms.$exportId +msisdnPseudonymsTable=$projectId.exported_pseudonyms.${exportId}_msisdn +subscriberPseudonymsTable=$projectId.exported_pseudonyms.${exportId}_subscriber +sub2msisdnMappingsTable=exported_data_consumption.${exportId}_sub2msisdn hourlyConsumptionTable=$projectId.data_consumption.hourly_consumption dataConsumptionTable=exported_data_consumption.$exportId +rawPurchasesTable=$projectId.purchases.raw_purchases 
+purchaseRecordsTable=exported_data_consumption.${exportId}_purchases csvfile=$projectId-dataconsumption-export/$exportId.csv +purchasesCsvfile=$projectId-dataconsumption-export/$exportId-purchases.csv +sub2msisdnCsvfile=$projectId-dataconsumption-export/$exportId-sub2msisdn.csv # Generate the pseudonym tables for this export echo "Starting export job for $exportId" @@ -43,19 +49,20 @@ if [[ $jsonResult != FINISHED ]]; then echo "Table creation failed $(curl -X GET $queryUrl 2> /dev/null)" exit fi -echo "Created Table $pseudonymsTable" +echo "Created Table $msisdnPseudonymsTable" +echo "Created Table $subscriberPseudonymsTable" echo "Creating table $dataConsumptionTable" # SQL for joining pseudonym & hourly consumption tables. read -r -d '' sqlForJoin << EOM SELECT - hc.bytes, ps.msisdnid, hc.timestamp + hc.bytes, ps.pseudoid as msisdnid, hc.timestamp FROM \`$hourlyConsumptionTable\` as hc JOIN - \`$pseudonymsTable\` as ps -ON ps.msisdn = hc.msisdn + \`$msisdnPseudonymsTable\` as ps +ON ps.pseudonym = hc.msisdn EOM # Run the query using bq & dump results to the new table bq --location=EU --format=none query --destination_table $dataConsumptionTable --replace --use_legacy_sql=false $sqlForJoin @@ -63,4 +70,28 @@ echo "Created table $dataConsumptionTable" echo "Exporting data to csv $csvfile" bq --location=EU extract --destination_format=CSV $dataConsumptionTable gs://$csvfile -echo "Exported data to gs://$csvfile" \ No newline at end of file +echo "Exported data to gs://$csvfile" + +echo "Creating table $purchaseRecordsTable" +# SQL for joining subscriber pseudonym & purchase record tables. +read -r -d '' sqlForJoin2 << EOM +SELECT + TIMESTAMP_MILLIS(pr.timestamp) as timestamp , ps.pseudoid as subscriberId, pr.product.sku, pr.product.price.amount, product.price.currency +FROM + \`$rawPurchasesTable\` as pr +JOIN + \`$subscriberPseudonymsTable\` as ps +ON ps.pseudonym = pr.subscriberId +EOM + +# Run the query using bq & dump results to the new table +bq --location=EU --format=none query --destination_table $purchaseRecordsTable --replace --use_legacy_sql=false $sqlForJoin2 +echo "Created table $purchaseRecordsTable" + +echo "Exporting data to csv $purchasesCsvfile" +bq --location=EU extract --destination_format=CSV $purchaseRecordsTable gs://$purchasesCsvfile +echo "Exported data to gs://$purchasesCsvfile" + +echo "Exporting data to csv $sub2msisdnCsvfile" +bq --location=EU extract --destination_format=CSV $sub2msisdnMappingsTable gs://$sub2msisdnCsvfile +echo "Exported data to gs://$sub2msisdnCsvfile" diff --git a/ext-auth-provider/Dockerfile b/ext-auth-provider/Dockerfile index 796602997..78e784b6d 100644 --- a/ext-auth-provider/Dockerfile +++ b/ext-auth-provider/Dockerfile @@ -1,4 +1,4 @@ -FROM openjdk:8u171 +FROM azul/zulu-openjdk:8u181-8.31.0.1 MAINTAINER CSI "csi@telenordigital.com" diff --git a/ext-auth-provider/build.gradle b/ext-auth-provider/build.gradle index 06474ae34..225fb0949 100644 --- a/ext-auth-provider/build.gradle +++ b/ext-auth-provider/build.gradle @@ -1,5 +1,5 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "application" id "com.github.johnrengelman.shadow" version "2.0.4" } diff --git a/firebase-extensions/build.gradle b/firebase-extensions/build.gradle new file mode 100644 index 000000000..d741c6479 --- /dev/null +++ b/firebase-extensions/build.gradle @@ -0,0 +1,11 @@ +plugins { + id "org.jetbrains.kotlin.jvm" version "1.2.70" + id "java-library" +} + +dependencies { + implementation 
project(":prime-modules") + // Match netty via ocs-api + api "com.google.firebase:firebase-admin:$firebaseVersion" + api "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" +} \ No newline at end of file diff --git a/firebase-extensions/src/main/kotlin/org.ostelco.common.firebasex/FirebaseExtensions.kt b/firebase-extensions/src/main/kotlin/org.ostelco.common.firebasex/FirebaseExtensions.kt new file mode 100644 index 000000000..142f10170 --- /dev/null +++ b/firebase-extensions/src/main/kotlin/org.ostelco.common.firebasex/FirebaseExtensions.kt @@ -0,0 +1,33 @@ +package org.ostelco.common.firebasex + +import com.fasterxml.jackson.core.type.TypeReference +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.module.kotlin.registerKotlinModule +import com.google.auth.oauth2.GoogleCredentials +import com.google.firebase.FirebaseOptions.Builder +import java.io.File +import java.io.FileInputStream +import java.nio.file.Files +import java.nio.file.Paths +import javax.naming.ConfigurationException + +private val genericMapType = object : TypeReference>() {} +private val objectMapper = ObjectMapper().registerKotlinModule() + +/** + * Extension function added into [com.google.firebase.FirebaseOptions.Builder] which accepts Firebase Credentials + * file and sets credentials as well as database URL. Database URL needs database name, which is extracted from the same + * credentials file. If the credentials file does not exists, it throws [ConfigurationException]. + */ +fun Builder.usingCredentialsFile(credentialsFile: String): Builder { + if (Files.exists(Paths.get(credentialsFile))) { + val credentials: GoogleCredentials = FileInputStream(credentialsFile) + .use { serviceAccount -> GoogleCredentials.fromStream(serviceAccount) } + val map: Map = objectMapper.readValue(File(credentialsFile), genericMapType) + val databaseName = map["project_id"] + setCredentials(credentials) + setDatabaseUrl("https://$databaseName.firebaseio.com/") + return this + } + throw ConfigurationException("Missing Firebase Credentials file: $credentialsFile") +} \ No newline at end of file diff --git a/firebase-store/build.gradle b/firebase-store/build.gradle index 88fc3cd52..14a377823 100644 --- a/firebase-store/build.gradle +++ b/firebase-store/build.gradle @@ -1,13 +1,11 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "java-library" } dependencies { - implementation project(":prime-api") - // Match netty via ocs-api - api 'com.google.firebase:firebase-admin:6.4.0' - implementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" + implementation project(":prime-modules") + api project(":firebase-extensions") testImplementation "org.jetbrains.kotlin:kotlin-test:$kotlinVersion" testImplementation "org.jetbrains.kotlin:kotlin-test-junit:$kotlinVersion" diff --git a/firebase-store/src/main/kotlin/org/ostelco/prime/storage/firebase/FirebaseModule.kt b/firebase-store/src/main/kotlin/org/ostelco/prime/storage/firebase/FirebaseModule.kt index bc212f4ee..a6b608dbc 100644 --- a/firebase-store/src/main/kotlin/org/ostelco/prime/storage/firebase/FirebaseModule.kt +++ b/firebase-store/src/main/kotlin/org/ostelco/prime/storage/firebase/FirebaseModule.kt @@ -2,7 +2,6 @@ package org.ostelco.prime.storage.firebase import com.fasterxml.jackson.annotation.JsonProperty import com.fasterxml.jackson.annotation.JsonTypeName -import org.hibernate.validator.constraints.NotEmpty import org.ostelco.prime.module.PrimeModule 
@JsonTypeName("firebase") @@ -18,17 +17,4 @@ object FirebaseConfigRegistry { lateinit var firebaseConfig: FirebaseConfig } -class FirebaseConfig { - - @NotEmpty - @JsonProperty("databaseName") - lateinit var databaseName: String - - @NotEmpty - @JsonProperty("configFile") - lateinit var configFile: String - - @NotEmpty - @JsonProperty("rootPath") - lateinit var rootPath: String -} \ No newline at end of file +data class FirebaseConfig(val configFile: String, val rootPath: String) \ No newline at end of file diff --git a/firebase-store/src/main/kotlin/org/ostelco/prime/storage/firebase/FirebaseSchema.kt b/firebase-store/src/main/kotlin/org/ostelco/prime/storage/firebase/FirebaseSchema.kt index 2276e5e93..2c9f52180 100644 --- a/firebase-store/src/main/kotlin/org/ostelco/prime/storage/firebase/FirebaseSchema.kt +++ b/firebase-store/src/main/kotlin/org/ostelco/prime/storage/firebase/FirebaseSchema.kt @@ -5,7 +5,7 @@ import com.google.firebase.database.DatabaseError import com.google.firebase.database.DatabaseReference import com.google.firebase.database.FirebaseDatabase import com.google.firebase.database.ValueEventListener -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger import java.net.URLDecoder import java.net.URLEncoder import java.nio.charset.StandardCharsets @@ -24,7 +24,7 @@ class EntityStore( firebaseDatabase: FirebaseDatabase, private val entityType: EntityType) { - private val logger by logger() + private val logger by getLogger() val databaseReference: DatabaseReference = firebaseDatabase.getReference("/${config.rootPath}/${entityType.path}") @@ -94,14 +94,8 @@ class EntityStore( URLDecoder.decode(value, StandardCharsets.UTF_8.name()) .replace(oldValue = "%2E", newValue = ".") - /** - * Check if entity exists for a given value - */ private fun exists(id: String, reference: EntityStore.() -> DatabaseReference = { databaseReference }) = get(id, reference) != null - /** - * Inverse of exists - */ private fun dontExists(id: String, reference: EntityStore.() -> DatabaseReference = { databaseReference }) = !exists(id, reference) /** diff --git a/firebase-store/src/main/kotlin/org/ostelco/prime/storage/firebase/FirebaseStorage.kt b/firebase-store/src/main/kotlin/org/ostelco/prime/storage/firebase/FirebaseStorage.kt index aab071b0f..4c651e8bd 100644 --- a/firebase-store/src/main/kotlin/org/ostelco/prime/storage/firebase/FirebaseStorage.kt +++ b/firebase-store/src/main/kotlin/org/ostelco/prime/storage/firebase/FirebaseStorage.kt @@ -1,18 +1,15 @@ package org.ostelco.prime.storage.firebase -import com.google.auth.oauth2.GoogleCredentials import com.google.firebase.FirebaseApp import com.google.firebase.FirebaseOptions import com.google.firebase.database.FirebaseDatabase +import org.ostelco.common.firebasex.usingCredentialsFile import org.ostelco.prime.model.ApplicationToken import org.ostelco.prime.storage.DocumentStore -import java.io.FileInputStream -import java.nio.file.Files -import java.nio.file.Paths /** * This class is using the singleton class as delegate. - * This is done because the {@link java.util.ServiceLoader} expects public no-args constructor, which is absent in Singleton. + * This is done because the [java.util.ServiceLoader] expects public no-args constructor, which is absent in Singleton. 
*/ class FirebaseStorage : DocumentStore by FirebaseStorageSingleton @@ -29,18 +26,10 @@ object FirebaseStorageSingleton : DocumentStore { private fun setupFirebaseInstance(): FirebaseDatabase { val config: FirebaseConfig = FirebaseConfigRegistry.firebaseConfig - val databaseName: String = config.databaseName val configFile: String = config.configFile - val credentials: GoogleCredentials = if (Files.exists(Paths.get(configFile))) { - FileInputStream(configFile).use { serviceAccount -> GoogleCredentials.fromStream(serviceAccount) } - } else { - GoogleCredentials.getApplicationDefault() - } - val options = FirebaseOptions.Builder() - .setCredentials(credentials) - .setDatabaseUrl("https://$databaseName.firebaseio.com/") + .usingCredentialsFile(configFile) .build() try { FirebaseApp.getInstance() diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 115e6ac0a..e0b3fb8d7 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-4.10-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-4.10.2-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/model/build.gradle b/model/build.gradle index 081b9c8f6..3c2a0a6cd 100644 --- a/model/build.gradle +++ b/model/build.gradle @@ -1,5 +1,5 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "java-library" } @@ -8,6 +8,6 @@ dependencies { implementation "com.fasterxml.jackson.core:jackson-annotations:$jacksonVersion" // TODO vihang: this dependency is added only for @Exclude annotation for firebase - implementation 'com.google.firebase:firebase-admin:6.4.0' + implementation "com.google.firebase:firebase-admin:$firebaseVersion" implementation "org.slf4j:slf4j-api:1.7.25" } \ No newline at end of file diff --git a/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt b/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt index d02412ace..60c64ad04 100644 --- a/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt +++ b/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt @@ -9,12 +9,12 @@ interface HasId { data class Offer( override val id: String, - @JsonIgnore val segments: Collection = emptyList(), - @JsonIgnore val products: Collection = emptyList()) : HasId + val segments: Collection = emptyList(), + val products: Collection = emptyList()) : HasId data class Segment( override val id: String, - @JsonIgnore val subscribers: Collection = emptyList()) : HasId + val subscribers: Collection = emptyList()) : HasId data class Subscriber( val email: String, @@ -48,7 +48,8 @@ data class ApplicationToken( } data class Subscription( - val msisdn: String) : HasId { + val msisdn: String, + val alias: String = "") : HasId { override val id: String @JsonIgnore diff --git a/neo4j-store/build.gradle b/neo4j-store/build.gradle index a3db2a884..70ed8509f 100644 --- a/neo4j-store/build.gradle +++ b/neo4j-store/build.gradle @@ -1,10 +1,10 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "java-library" } -ext.neo4jVersion="3.4.4" -ext.neo4jDriverVersion="1.6.1" +ext.neo4jVersion="3.4.7" +ext.neo4jDriverVersion="1.6.3" tasks.withType(org.jetbrains.kotlin.gradle.tasks.KotlinCompile).all { kotlinOptions { @@ -19,7 +19,7 @@ repositories { } dependencies { 
- implementation project(":prime-api") + implementation project(":prime-modules") implementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" implementation "com.fasterxml.jackson.core:jackson-databind:$jacksonVersion" diff --git a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt index 08cbc4451..733f9b63c 100644 --- a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt +++ b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt @@ -7,8 +7,7 @@ import org.neo4j.driver.v1.Transaction import org.ostelco.prime.analytics.AnalyticsService import org.ostelco.prime.analytics.PrimeMetric.REVENUE import org.ostelco.prime.analytics.PrimeMetric.USERS_PAID_AT_LEAST_ONCE -import org.ostelco.prime.core.ApiError -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger import org.ostelco.prime.model.Bundle import org.ostelco.prime.model.Offer import org.ostelco.prime.model.Product @@ -24,8 +23,6 @@ import org.ostelco.prime.paymentprocessor.PaymentProcessor import org.ostelco.prime.paymentprocessor.core.BadGatewayError import org.ostelco.prime.paymentprocessor.core.PaymentError import org.ostelco.prime.paymentprocessor.core.ProductInfo -import org.ostelco.prime.paymentprocessor.core.ProfileInfo -import org.ostelco.prime.storage.DocumentStore import org.ostelco.prime.storage.GraphStore import org.ostelco.prime.storage.NotFoundError import org.ostelco.prime.storage.StoreError @@ -60,7 +57,7 @@ class Neo4jStore : GraphStore by Neo4jStoreSingleton object Neo4jStoreSingleton : GraphStore { private val ocsAdminService: OcsAdminService by lazy { getResource() } - private val logger by logger() + private val logger by getLogger() // // Entity @@ -125,7 +122,7 @@ object Neo4jStoreSingleton : GraphStore { // Balance (Subscriber - Bundle) // - override fun getBundles(subscriberId: String): Either?> = readTransaction { + override fun getBundles(subscriberId: String): Either> = readTransaction { subscriberStore.getRelated(subscriberId, subscriberToBundleRelation, transaction) } @@ -197,7 +194,7 @@ object Neo4jStoreSingleton : GraphStore { .ifFailedThenRollback(transaction) } // << END - + override fun updateSubscriber(subscriber: Subscriber): Either = writeTransaction { subscriberStore.update(subscriber, transaction) .ifFailedThenRollback(transaction) @@ -264,7 +261,15 @@ object Neo4jStoreSingleton : GraphStore { override fun getMsisdn(subscriptionId: String): Either { return readTransaction { subscriberStore.getRelated(subscriptionId, subscriptionRelation, transaction) - .map { it.first().msisdn } + .flatMap { + if (it.isEmpty()) { + Either.left(NotFoundError( + type = subscriptionEntity.name, + id = "for ${subscriberEntity.name} = $subscriptionId")) + } else { + Either.right(it.first().msisdn) + } + } } } @@ -326,30 +331,10 @@ object Neo4jStoreSingleton : GraphStore { // TODO vihang: Move this logic to DSL + Rule Engine + Triggers, when they are ready // >> BEGIN - private val documentStore by lazy { getResource() } private val paymentProcessor by lazy { getResource() } private val ocs by lazy { getResource() } private val analyticsReporter by lazy { getResource() } - private fun getPaymentProfile(name: String): Either = - documentStore.getPaymentId(name) - ?.let { profileInfoId -> Either.right(ProfileInfo(profileInfoId)) } - ?: Either.left(BadGatewayError("Failed to fetch payment customer ID")) - - private fun 
createAndStorePaymentProfile(name: String): Either { - return paymentProcessor.createPaymentProfile(name) - .flatMap { profileInfo -> - setPaymentProfile(name, profileInfo) - .map { profileInfo } - } - } - - private fun setPaymentProfile(name: String, profileInfo: ProfileInfo): Either = - Either.cond( - test = documentStore.createPaymentId(name, profileInfo.id), - ifTrue = { Unit }, - ifFalse = { BadGatewayError("Failed to save payment customer ID") }) - override fun purchaseProduct( subscriberId: String, sku: String, @@ -361,9 +346,9 @@ object Neo4jStoreSingleton : GraphStore { .mapLeft { org.ostelco.prime.paymentprocessor.core.NotFoundError("Product unavailable") } .flatMap { product: Product -> // Fetch/Create stripe payment profile for the subscriber. - getPaymentProfile(subscriberId) + paymentProcessor.getPaymentProfile(subscriberId) .fold( - { createAndStorePaymentProfile(subscriberId) }, + { paymentProcessor.createPaymentProfile(subscriberId) }, { profileInfo -> Either.right(profileInfo) } ) .map { profileInfo -> Pair(product, profileInfo) } @@ -371,7 +356,19 @@ object Neo4jStoreSingleton : GraphStore { .flatMap { (product, profileInfo) -> // Add payment source if (sourceId != null) { - paymentProcessor.addSource(profileInfo.id, sourceId).map { sourceInfo -> Triple(product, profileInfo, sourceInfo.id) } + paymentProcessor.getSavedSources(profileInfo.id) + .fold( + { + Either.left(org.ostelco.prime.paymentprocessor.core.BadGatewayError("Failed to fetch sources for user", it.description)) + }, + { + var linkedSource = sourceId + if (!it.any{ sourceDetailsInfo -> sourceDetailsInfo.id == sourceId }) { + paymentProcessor.addSource(profileInfo.id, sourceId).map { sourceInfo -> linkedSource = sourceInfo.id } + } + Either.right(Triple(product,profileInfo, linkedSource)) + } + ) } else { Either.right(Triple(product, profileInfo, null)) } @@ -433,7 +430,7 @@ object Neo4jStoreSingleton : GraphStore { if (!saveCard && savedSourceId != null) { paymentProcessor.removeSource(profileInfo.id, savedSourceId) .mapLeft { paymentError -> - logger.error("Failed to remove card, for customerId ${profileInfo.id}, sourceId $sourceId") + logger.error("Failed to remove card, for customerId ${profileInfo.id}, sourceId $savedSourceId") paymentError } } @@ -578,6 +575,7 @@ object Neo4jStoreSingleton : GraphStore { result.single().get("count").asLong() } } + // // Stores // @@ -600,34 +598,106 @@ object Neo4jStoreSingleton : GraphStore { private val productClassEntity = EntityType(ProductClass::class.java) private val productClassStore = EntityStore(productClassEntity) + // + // Product Class + // override fun createProductClass(productClass: ProductClass): Either = writeTransaction { productClassStore.create(productClass, transaction) .ifFailedThenRollback(transaction) } + // + // Product + // override fun createProduct(product: Product): Either = writeTransaction { - productStore.create(product, transaction) + createProduct(product, transaction) .ifFailedThenRollback(transaction) } - override fun createSegment(segment: Segment): Either { - return writeTransaction { - segmentStore.create(segment, transaction) - .flatMap { subscriberToSegmentStore.create(segment.subscribers, segment.id, transaction) } - .ifFailedThenRollback(transaction) - } + private fun createProduct(product: Product, transaction: Transaction): Either = + productStore.create(product, transaction) + + // + // Segment + // + override fun createSegment(segment: Segment): Either = writeTransaction { + createSegment(segment, transaction) + 
.ifFailedThenRollback(transaction) } + private fun createSegment(segment: Segment, transaction: Transaction): Either { + return segmentStore.create(segment, transaction) + .flatMap { subscriberToSegmentStore.create(segment.subscribers, segment.id, transaction) } + } + + override fun updateSegment(segment: Segment): Either = writeTransaction { + subscriberToSegmentStore.create(segment.id, segment.subscribers, transaction) + .ifFailedThenRollback(transaction) + } + + // + // Offer + // override fun createOffer(offer: Offer): Either = writeTransaction { - offerStore - .create(offer, transaction) + createOffer(offer, transaction) + .ifFailedThenRollback(transaction) + } + + private fun createOffer(offer: Offer, transaction: Transaction): Either { + return offerStore + .create(offer.id, transaction) .flatMap { offerToSegmentStore.create(offer.id, offer.segments, transaction) } .flatMap { offerToProductStore.create(offer.id, offer.products, transaction) } - .ifFailedThenRollback(transaction) } - override fun updateSegment(segment: Segment): Either = writeTransaction { - subscriberToSegmentStore.create(segment.id, segment.subscribers, transaction) + // + // Atomic Import of Offer + Product + Segment + // + override fun atomicImport( + offer: Offer, + segments: Collection, + products: Collection): Either = writeTransaction { + + // validation + val productIds = (offer.products + products.map { it.sku }).toSet() + val segmentIds = (offer.segments + segments.map { it.id }).toSet() + + if (productIds.isEmpty()) { + return@writeTransaction Either.left(ValidationError( + type = productEntity.name, + id = offer.id, + message = "Cannot create Offer without new/existing Product(s)")) + } + + if (segmentIds.isEmpty()) { + return@writeTransaction Either.left(ValidationError( + type = offerEntity.name, + id = offer.id, + message = "Cannot create Offer without new/existing Segment(s)")) + } + // end of validation + + var result = Either.right(Unit) as Either + + result = products.fold( + initial = result, + operation = { acc, product -> + acc.flatMap { createProduct(product, transaction) } + }) + + result = segments.fold( + initial = result, + operation = { acc, segment -> + acc.flatMap { createSegment(segment, transaction) } + }) + + val actualOffer = Offer( + id = offer.id, + products = productIds, + segments = segmentIds) + + result + .flatMap { createOffer(actualOffer, transaction) } .ifFailedThenRollback(transaction) } diff --git a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Schema.kt b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Schema.kt index 397674a83..4500633c0 100644 --- a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Schema.kt +++ b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Schema.kt @@ -9,7 +9,7 @@ import org.neo4j.driver.v1.AccessMode.READ import org.neo4j.driver.v1.AccessMode.WRITE import org.neo4j.driver.v1.StatementResult import org.neo4j.driver.v1.Transaction -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger import org.ostelco.prime.model.HasId import org.ostelco.prime.storage.AlreadyExistsError import org.ostelco.prime.storage.NotCreatedError @@ -72,6 +72,21 @@ class EntityStore(private val entityType: EntityType) { } } + fun create(id: String, transaction: Transaction): Either { + + if (get(id, transaction).isRight()) { + return Either.left(AlreadyExistsError(type = entityType.name, id = id)) + } + + return write("""CREATE (node:${entityType.name} { id:"$id"});""", + transaction) { + if 
(it.summary().counters().nodesCreated() == 1) + Either.right(Unit) + else + Either.left(NotCreatedError(type = entityType.name, id = id)) + } + } + fun getRelated( id: String, relationType: RelationType, @@ -269,7 +284,7 @@ class RelationStore(private val relationType: Relation // object Graph { - private val LOG by logger() + private val LOG by getLogger() fun write(query: String, transaction: Transaction, transform: (StatementResult) -> R): R { LOG.trace("write:[\n$query\n]") diff --git a/neo4j-store/src/test/kotlin/org/ostelco/prime/storage/graph/GraphStoreTest.kt b/neo4j-store/src/test/kotlin/org/ostelco/prime/storage/graph/GraphStoreTest.kt index f7c5d439f..3d46a5fc1 100644 --- a/neo4j-store/src/test/kotlin/org/ostelco/prime/storage/graph/GraphStoreTest.kt +++ b/neo4j-store/src/test/kotlin/org/ostelco/prime/storage/graph/GraphStoreTest.kt @@ -159,6 +159,59 @@ class GraphStoreTest { { fail("Expected get product to fail since it is not linked to any subscriber --> segment --> offer") }) } + @Test + fun `import offer + product + segment`() { + + // existing products + Neo4jStoreSingleton.createProduct(createProduct("1GB_249NOK", 24900)) + .mapLeft { fail(it.message) } + Neo4jStoreSingleton.createProduct(createProduct("2GB_299NOK", 29900)) + .mapLeft { fail(it.message) } + + val products = listOf( + createProduct("3GB_349NOK", 34900), + createProduct("5GB_399NOK", 39900)) + + val segments = listOf(Segment(id = "segment_1"), Segment(id = "segment_2")) + + val offer = Offer(id = "some_offer", products = listOf("1GB_249NOK", "2GB_299NOK")) + + Neo4jStoreSingleton.atomicImport(offer = offer, products = products, segments = segments) + .mapLeft { fail(it.message) } + } + + @Test + fun `failed on import duplicate offer`() { + + // existing products + Neo4jStoreSingleton.createProduct(createProduct("1GB_249NOK", 24900)) + .mapLeft { fail(it.message) } + Neo4jStoreSingleton.createProduct(createProduct("2GB_299NOK", 29900)) + .mapLeft { fail(it.message) } + + // new products in the offer + val products = listOf( + createProduct("3GB_349NOK", 34900), + createProduct("5GB_399NOK", 39900)) + + // new segment in the offer + val segments = listOf(Segment(id = "segment_1"), Segment(id = "segment_2")) + + val offer = Offer(id = "some_offer", products = listOf("1GB_249NOK", "2GB_299NOK")) + + Neo4jStoreSingleton.atomicImport(offer = offer, products = products, segments = segments) + .mapLeft { fail(it.message) } + + val duplicateOffer = Offer( + id = offer.id, + products = (products.map { it.sku } + offer.products).toSet(), + segments = segments.map { it.id }) + + Neo4jStoreSingleton.atomicImport(offer = duplicateOffer).bimap( + { assertEquals("Offer - some_offer already exists.", it.message) }, + { fail("Expected import to fail since offer already exists.") }) + } + companion object { const val EMAIL = "foo@bar.com" const val NAME = "Test User" diff --git a/ocs-grpc-api/build.gradle b/ocs-grpc-api/build.gradle index 308b1b22f..1a37a301e 100644 --- a/ocs-grpc-api/build.gradle +++ b/ocs-grpc-api/build.gradle @@ -4,10 +4,6 @@ plugins { id "idea" } -// Keeping it version 1.14.0 to be consistent with grpc via PubSub client lib -// Keeping it version 1.14.0 to be consistent with netty via Firebase lib -ext.grpcVersion = "1.14.0" - dependencies { api "io.grpc:grpc-netty-shaded:$grpcVersion" api "io.grpc:grpc-protobuf:$grpcVersion" diff --git a/ocs/build.gradle b/ocs/build.gradle index cf6c53e4d..aef0024c8 100644 --- a/ocs/build.gradle +++ b/ocs/build.gradle @@ -1,16 +1,23 @@ plugins { - id 
"org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "java-library" } +kotlin { + experimental { + coroutines 'enable' + } +} + dependencies { - implementation project(':prime-api') + implementation project(':prime-modules') implementation 'com.lmax:disruptor:3.4.2' + implementation "org.jetbrains.kotlinx:kotlinx-coroutines-core:0.26.1" testImplementation "org.jetbrains.kotlin:kotlin-test:$kotlinVersion" testImplementation "org.jetbrains.kotlin:kotlin-test-junit:$kotlinVersion" - testImplementation 'org.mockito:mockito-core:2.18.3' + testImplementation "org.mockito:mockito-core:$mockitoVersion" } apply from: '../jacoco.gradle' \ No newline at end of file diff --git a/ocs/design.puml b/ocs/design.puml new file mode 100644 index 000000000..4ee79d418 --- /dev/null +++ b/ocs/design.puml @@ -0,0 +1,92 @@ +@startuml + +' +' Declarations +' + +[ocsgw] + +interface OcsGrpcService + +[OcsGrpcServer] + +interface OcsAsyncRequestConsumer + +[OcsService] + +interface EventProducer + +[EventProducerImpl] + +interface OcsAsyncResponseProducer + +node OcsDisruptor { + database OcsState + database BundleBalanceStore + interface "**PurchaseRequestHandler**\n //implements// **EventHandler**" AS PREH + [ThresholdChecker] + database AnalyticsReporter + [OcsEventToGrpcResponseMapper] + [ClearingEvent] +} + +actor Client + +interface OcsSubscriberService + +[OcsPrimeService] + +[PurchaseRequestHandler] + +' +' Purchase Request +' + +Client --> OcsSubscriberService : <> Purchase Request + +OcsSubscriberService -- OcsPrimeService : <> + +OcsPrimeService --> PurchaseRequestHandler : <> Purchase Request + +PurchaseRequestHandler -right-> EventProducer : <> Purchase Request + +PurchaseRequestHandler -- PREH : <> +' +' Consumption +' + +ocsgw -down-> OcsGrpcService : gRPC connection + +OcsGrpcServer .right.> OcsGrpcService : binds + +OcsGrpcService -down-> OcsAsyncRequestConsumer : <> Sends Requests + +OcsAsyncRequestConsumer -down- OcsService : <> + +OcsService --right- OcsAsyncResponseProducer : <> + +OcsService -left-> EventProducer : <> Forwards Requests + +EventProducer -down- EventProducerImpl + +EventProducerImpl -down-> OcsDisruptor : <> Submit OcsEvent + +' +' Disruptor +' + +OcsState --> BundleBalanceStore +OcsState --> PREH +OcsState --> ThresholdChecker +OcsState --> AnalyticsReporter +OcsState --> OcsEventToGrpcResponseMapper + +OcsEventToGrpcResponseMapper --> ClearingEvent +BundleBalanceStore --> ClearingEvent +PREH --> ClearingEvent +ThresholdChecker --> ClearingEvent +AnalyticsReporter --> ClearingEvent + +OcsEventToGrpcResponseMapper -up-> OcsAsyncResponseProducer + +@enduml \ No newline at end of file diff --git a/ocs/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsReporter.kt b/ocs/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsReporter.kt new file mode 100644 index 000000000..e564c291c --- /dev/null +++ b/ocs/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsReporter.kt @@ -0,0 +1,43 @@ +package org.ostelco.prime.analytics + +import com.lmax.disruptor.EventHandler +import org.ostelco.prime.analytics.PrimeMetric.MEGABYTES_CONSUMED +import org.ostelco.prime.disruptor.EventMessageType.CREDIT_CONTROL_REQUEST +import org.ostelco.prime.disruptor.OcsEvent +import org.ostelco.prime.getLogger +import org.ostelco.prime.module.getResource + +/** + * This class publishes the data consumption information events analytics. 
+ */ +object AnalyticsReporter : EventHandler { + + private val logger by getLogger() + + private val analyticsReporter by lazy { getResource() } + + override fun onEvent( + event: OcsEvent, + sequence: Long, + endOfBatch: Boolean) { + + if (event.messageType != CREDIT_CONTROL_REQUEST) { + return + } + + val msisdn = event.msisdn + if (msisdn != null) { + logger.info("Sent Data Consumption info event to analytics") + analyticsReporter.reportTrafficInfo( + msisdn = msisdn, + usedBytes = event.request?.msccList?.firstOrNull()?.used?.totalOctets ?: 0L, + bundleBytes = event.bundleBytes, + apn = event.request?.serviceInformation?.psInformation?.calledStationId, + mccMnc = event.request?.serviceInformation?.psInformation?.sgsnMccMnc) + analyticsReporter.reportMetric( + primeMetric = MEGABYTES_CONSUMED, + value = (event.request?.msccList?.firstOrNull()?.used?.totalOctets ?: 0L) / 1_000_000) + + } + } +} diff --git a/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt b/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt deleted file mode 100644 index 268660faa..000000000 --- a/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt +++ /dev/null @@ -1,55 +0,0 @@ -package org.ostelco.prime.analytics - -import com.lmax.disruptor.EventHandler -import org.ostelco.prime.analytics.PrimeMetric.MEGABYTES_CONSUMED -import org.ostelco.ocs.api.CreditControlRequestType -import org.ostelco.prime.disruptor.OcsEvent -import org.ostelco.prime.disruptor.EventMessageType.CREDIT_CONTROL_REQUEST -import org.ostelco.prime.logger -import org.ostelco.prime.module.getResource - -/** - * This class publishes the data consumption information events analytics. - */ -class DataConsumptionInfo() : EventHandler { - - private val logger by logger() - - private val analyticsReporter by lazy { getResource() } - - override fun onEvent( - event: OcsEvent, - sequence: Long, - endOfBatch: Boolean) { - - if (event.messageType != CREDIT_CONTROL_REQUEST) { - return - } - - if (event.msisdn != null) { - logger.info("Sent DataConsumptionInfo event to analytics") - analyticsReporter.reportTrafficInfo( - msisdn = event.msisdn!!, - usedBytes = event.request?.msccList?.firstOrNull()?.used?.totalOctets ?: 0L, - bundleBytes = event.bundleBytes) - analyticsReporter.reportMetric( - primeMetric = MEGABYTES_CONSUMED, - value = (event.request?.msccList?.firstOrNull()?.used?.totalOctets ?: 0L) / 1_000_000) - - //ToDo: Send to analytics and build pipeline - event.request?.let { request -> - if(request.type == CreditControlRequestType.INITIAL_REQUEST) { - logger.info("MSISDN : {} connected apn {} sgsn_mcc_mnc {}", - request.msisdn, - request.serviceInformation.psInformation.calledStationId, - request.serviceInformation.psInformation.sgsnMccMnc) - } else if (request.type == CreditControlRequestType.TERMINATION_REQUEST) { - logger.info("MSISDN : {} disconnected apn {} sgsn_mcc_mnc", - request.msisdn, - request.serviceInformation.psInformation.calledStationId, - request.serviceInformation.psInformation.sgsnMccMnc) - } - } - } - } -} diff --git a/ocs/src/main/kotlin/org/ostelco/prime/consumption/Interfaces.kt b/ocs/src/main/kotlin/org/ostelco/prime/consumption/Interfaces.kt new file mode 100644 index 000000000..592acd3df --- /dev/null +++ b/ocs/src/main/kotlin/org/ostelco/prime/consumption/Interfaces.kt @@ -0,0 +1,25 @@ +package org.ostelco.prime.consumption + +import io.grpc.stub.StreamObserver +import org.ostelco.ocs.api.ActivateResponse +import org.ostelco.ocs.api.CreditControlAnswerInfo +import 
org.ostelco.ocs.api.CreditControlRequestInfo + +/** + * Ocs Requests from [OcsGrpcService] are consumed by implementation [OcsService] of [OcsAsyncRequestConsumer] + */ +interface OcsAsyncRequestConsumer { + fun putCreditControlClient(streamId: String, creditControlAnswer: StreamObserver) + fun creditControlRequestEvent(streamId: String, request: CreditControlRequestInfo) + fun deleteCreditControlClient(streamId: String) + fun updateActivateResponse(streamId: String, activateResponse: StreamObserver) +} + +/** + * Ocs Events from [OcsEventToGrpcResponseMapper] forwarded to implementation [OcsService] of [OcsAsyncResponseProducer] + */ +interface OcsAsyncResponseProducer { + fun activateOnNextResponse(response: ActivateResponse) + fun sendCreditControlAnswer(streamId: String, creditControlAnswer: CreditControlAnswerInfo) + fun returnUnusedDataBucketEvent(msisdn: String, reservedBucketBytes: Long) +} \ No newline at end of file diff --git a/ocs/src/main/kotlin/org/ostelco/prime/consumption/OcsEventToGrpcResponseMapper.kt b/ocs/src/main/kotlin/org/ostelco/prime/consumption/OcsEventToGrpcResponseMapper.kt new file mode 100644 index 000000000..cbd493d39 --- /dev/null +++ b/ocs/src/main/kotlin/org/ostelco/prime/consumption/OcsEventToGrpcResponseMapper.kt @@ -0,0 +1,126 @@ +package org.ostelco.prime.consumption + +import com.lmax.disruptor.EventHandler +import org.ostelco.ocs.api.ActivateResponse +import org.ostelco.ocs.api.CreditControlAnswerInfo +import org.ostelco.ocs.api.FinalUnitAction +import org.ostelco.ocs.api.FinalUnitIndication +import org.ostelco.ocs.api.MultipleServiceCreditControl +import org.ostelco.ocs.api.ReportingReason +import org.ostelco.ocs.api.ServiceUnit +import org.ostelco.prime.disruptor.EventMessageType.CREDIT_CONTROL_REQUEST +import org.ostelco.prime.disruptor.EventMessageType.TOPUP_DATA_BUNDLE_BALANCE +import org.ostelco.prime.disruptor.OcsEvent +import org.ostelco.prime.getLogger + +/** + * An event handler, handling the [OcsEvent] messages that + * are used by the Disruptor execution mechanism to handle events. 
+ */ +internal class OcsEventToGrpcResponseMapper(private val ocsAsyncResponseProducer: OcsAsyncResponseProducer) : EventHandler { + + private val logger by getLogger() + + override fun onEvent( + event: OcsEvent, + sequence: Long, + endOfBatch: Boolean) { + + try { + dispatchOnEventType(event) + } catch (e: Exception) { + logger.warn("Exception handling prime event in OcsService", e) + } + } + + private fun dispatchOnEventType(event: OcsEvent) { + when (event.messageType) { + CREDIT_CONTROL_REQUEST -> handleCreditControlRequest(event) + TOPUP_DATA_BUNDLE_BALANCE -> handleTopupDataBundleBalance(event) + + else -> {} // do nothing + } + } + + private fun handleTopupDataBundleBalance(event: OcsEvent) { + event.topupContext?.msisdnToppedUp?.forEach { msisdn -> + val response = ActivateResponse.newBuilder().setMsisdn(msisdn).build() + ocsAsyncResponseProducer.activateOnNextResponse(response) + } + } + + private fun logEventProcessing(msg: String, event: OcsEvent) { + val logString = """ + $msg + Msisdn: ${event.msisdn} + Requested bytes: ${event.request?.msccList?.firstOrNull()?.requested?.totalOctets ?: 0L} + Used bytes: ${event.request?.msccList?.firstOrNull()?.used?.totalOctets ?: 0L} + Bundle bytes: ${event.bundleBytes} + Topup bytes: ${event.topupContext?.topUpBytes} + Topup errorMessage: ${event.topupContext?.errorMessage} + Request id: ${event.request?.requestId} + """.trimIndent() + + logger.info(logString) + } + + private fun handleCreditControlRequest(event: OcsEvent) { + + logEventProcessing("Returning Credit-Control-Answer", event) + + // FIXME martin: This assume we only have one MSCC + // TODO martin: In case of zero balance we should add appropriate FinalUnitAction + + val msisdn = event.msisdn + + if (msisdn != null) { + try { + val creditControlAnswer = CreditControlAnswerInfo + .newBuilder() + .setMsisdn(msisdn) + + event.request?.let { request -> + if (request.msccCount > 0) { + val msccBuilder = MultipleServiceCreditControl.newBuilder() + msccBuilder.setServiceIdentifier(request.getMscc(0).serviceIdentifier) + .setRatingGroup(request.getMscc(0).ratingGroup) + .setValidityTime(86400) + + if ((request.getMscc(0).reportingReason != ReportingReason.FINAL) && (request.getMscc(0).requested.totalOctets > 0)) { + msccBuilder.granted = ServiceUnit.newBuilder() + .setTotalOctets(event.reservedBucketBytes) + .build() + if (event.reservedBucketBytes < request.getMscc(0).requested.totalOctets) { + msccBuilder.finalUnitIndication = FinalUnitIndication.newBuilder() + .setFinalUnitAction(FinalUnitAction.TERMINATE) + .setIsSet(true) + .build() + } + } else { + // Use -1 to indicate no granted service unit should be included in the answer + msccBuilder.granted = ServiceUnit.newBuilder() + .setTotalOctets(-1) + .build() + } + creditControlAnswer.addMscc(msccBuilder.build()) + } + creditControlAnswer.setRequestId(request.requestId) + } + + val streamId = event.ocsgwStreamId + if (streamId != null) { + ocsAsyncResponseProducer.sendCreditControlAnswer(streamId, creditControlAnswer.build()) + } + } catch (e: Exception) { + logger.warn("Exception handling prime event", e) + logEventProcessing("Exception sending Credit-Control-Answer", event) + + // unable to send Credit-Control-Answer. + // So, return reserved bucket bytes back to data bundle. 
+ ocsAsyncResponseProducer.returnUnusedDataBucketEvent( + msisdn, + event.reservedBucketBytes) + } + } + } +} diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsGrpcServer.kt b/ocs/src/main/kotlin/org/ostelco/prime/consumption/OcsGrpcServer.kt similarity index 72% rename from ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsGrpcServer.kt rename to ocs/src/main/kotlin/org/ostelco/prime/consumption/OcsGrpcServer.kt index 9abebad4f..07b3d52de 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsGrpcServer.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/consumption/OcsGrpcServer.kt @@ -1,11 +1,10 @@ -package org.ostelco.prime.ocs +package org.ostelco.prime.consumption import io.dropwizard.lifecycle.Managed import io.grpc.BindableService import io.grpc.Server import io.grpc.ServerBuilder -import org.ostelco.prime.logger -import java.io.IOException +import org.ostelco.prime.getLogger /** * This is OCS Server running on gRPC protocol. @@ -15,36 +14,24 @@ import java.io.IOException */ class OcsGrpcServer(private val port: Int, service: BindableService) : Managed { - private val logger by logger() + private val logger by getLogger() // may add Transport Security with Certificates if needed. // may add executor for control over number of threads private val server: Server = ServerBuilder.forPort(port).addService(service).build() - /** - * Startup is managed by Dropwizard's lifecycle. - * - * @throws IOException ... sometimes, perhaps. - */ override fun start() { server.start() logger.info("OcsServer Server started, listening for incoming gRPC traffic on {}", port) } - /** - * Shutdown is managed by Dropwizard's lifecycle. - * - * @throws InterruptedException When something goes wrong. - */ override fun stop() { logger.info("Stopping OcsServer Server listening for gRPC traffic on {}", port) server.shutdown() blockUntilShutdown() } - /** - * Used for unit testing - */ + // Used for unit testing fun forceStop() { logger.info("Stopping forcefully OcsServer Server listening for gRPC traffic on {}", port) server.shutdownNow() diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsGrpcService.kt b/ocs/src/main/kotlin/org/ostelco/prime/consumption/OcsGrpcService.kt similarity index 67% rename from ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsGrpcService.kt rename to ocs/src/main/kotlin/org/ostelco/prime/consumption/OcsGrpcService.kt index b27098722..b59fb312a 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsGrpcService.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/consumption/OcsGrpcService.kt @@ -1,4 +1,4 @@ -package org.ostelco.prime.ocs +package org.ostelco.prime.consumption import io.grpc.stub.StreamObserver import org.ostelco.ocs.api.ActivateRequest @@ -7,7 +7,7 @@ import org.ostelco.ocs.api.CreditControlAnswerInfo import org.ostelco.ocs.api.CreditControlRequestInfo import org.ostelco.ocs.api.CreditControlRequestType.NONE import org.ostelco.ocs.api.OcsServiceGrpc -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger import java.util.* @@ -38,37 +38,24 @@ import java.util.* * see that a client invokes a method, and listens for a stream of information related to * that particular stream. 
*/ -class OcsGrpcService(private val ocsService: OcsService) : OcsServiceGrpc.OcsServiceImplBase() { +class OcsGrpcService(private val ocsAsyncRequestConsumer: OcsAsyncRequestConsumer) : OcsServiceGrpc.OcsServiceImplBase() { - private val logger by logger() + private val logger by getLogger() /** * Method to handle Credit-Control-Requests * * @param creditControlAnswer Stream used to send Credit-Control-Answer back to requester */ - override fun creditControlRequest( - creditControlAnswer: StreamObserver): StreamObserver { + override fun creditControlRequest(creditControlAnswer: StreamObserver): StreamObserver { val streamId = newUniqueStreamId() - logger.info("Starting Credit-Control-Request with streamId: {}", streamId) - - ocsService.putCreditControlClient(streamId, creditControlAnswer) - + ocsAsyncRequestConsumer.putCreditControlClient(streamId, creditControlAnswer) return StreamObserverForStreamWithId(streamId) } - /** - * Return an unique ID based on Java's UUID generator that uniquely - * identifies a stream of values. - * @return A new unique identifier. - */ - private fun newUniqueStreamId(): String { - return UUID.randomUUID().toString() - } - - private inner class StreamObserverForStreamWithId internal constructor(private val streamId: String) : StreamObserver { + private inner class StreamObserverForStreamWithId(private val streamId: String) : StreamObserver { /** * This method gets called every time a Credit-Control-Request is received @@ -83,7 +70,7 @@ class OcsGrpcService(private val ocsService: OcsService) : OcsServiceGrpc.OcsSer logger.info("Received Credit-Control-Request request :: " + "for MSISDN: {} with request id: {}", request.msisdn, request.requestId) - ocsService.creditControlRequestEvent(request, streamId) + ocsAsyncRequestConsumer.creditControlRequestEvent(streamId = streamId, request = request) } override fun onError(t: Throwable) { @@ -92,7 +79,7 @@ class OcsGrpcService(private val ocsService: OcsService) : OcsServiceGrpc.OcsSer override fun onCompleted() { logger.info("Credit-Control-Request with streamId: {} completed", streamId) - ocsService.deleteCreditControlClient(streamId) + ocsAsyncRequestConsumer.deleteCreditControlClient(streamId) } } @@ -112,17 +99,15 @@ class OcsGrpcService(private val ocsService: OcsService) : OcsServiceGrpc.OcsSer request: ActivateRequest, activateResponse: StreamObserver) { - // The session we have with the OCS will only have one - // activation invocation. Thus it makes sense to keep the - // return channel (the activateResponse instance) in a - // particular place, so that's what we do. The reason this - // code looks brittle is that if we ever get multiple activate - // requests, it will break. The reason it never breaks - // is that we never do get more than one. So yes, it's brittle. 
- ocsService.updateActivateResponse(activateResponse) + val streamId = newUniqueStreamId() + logger.info("Starting Activate-Response stream with streamId: {}", streamId) + ocsAsyncRequestConsumer.updateActivateResponse(streamId, activateResponse) - val response = ActivateResponse.newBuilder().setMsisdn("").build() + val initialDummyResponse = ActivateResponse.newBuilder().setMsisdn("").build() + activateResponse.onNext(initialDummyResponse) + } - activateResponse.onNext(response) + private fun newUniqueStreamId(): String { + return UUID.randomUUID().toString() } } diff --git a/ocs/src/main/kotlin/org/ostelco/prime/consumption/OcsService.kt b/ocs/src/main/kotlin/org/ostelco/prime/consumption/OcsService.kt new file mode 100644 index 000000000..a529586a6 --- /dev/null +++ b/ocs/src/main/kotlin/org/ostelco/prime/consumption/OcsService.kt @@ -0,0 +1,88 @@ +package org.ostelco.prime.consumption + +import com.lmax.disruptor.EventHandler +import io.grpc.stub.StreamObserver +import org.ostelco.ocs.api.ActivateResponse +import org.ostelco.ocs.api.CreditControlAnswerInfo +import org.ostelco.ocs.api.CreditControlRequestInfo +import org.ostelco.ocs.api.OcsServiceGrpc +import org.ostelco.prime.disruptor.EventProducer +import org.ostelco.prime.disruptor.OcsEvent +import java.util.concurrent.ConcurrentHashMap +import java.util.concurrent.ConcurrentMap + +/** + * [OcsService] acts as bridge between [OcsGrpcService] and [OcsDisruptor]. + * + * Ocs Requests from [OcsGrpcService] are sent to [OcsService] via [OcsAsyncRequestConsumer] interface. + * These requests are then sent to [OcsDisruptor] via [EventProducer]. + * [EventProducerImpl] is the implementation for [EventProducer] interface. + * + * OcsEvents from [OcsDisruptor] are handled by [OcsEventToGrpcResponseMapper]. + * [OcsEventToGrpcResponseMapper] forwards responses to [OcsService] via [OcsAsyncResponseProducer] interface. + * [OcsService] produces responses and sends them to [OcsGrpcService]. 
+ */ +class OcsService(private val producer: EventProducer) : OcsAsyncRequestConsumer, OcsAsyncResponseProducer { + + private val creditControlClientMap: ConcurrentMap> = ConcurrentHashMap() + private val activateMsisdnClientMap: ConcurrentMap> = ConcurrentHashMap() + + /** + * A holder for + * [<] + * instances that are somehow used + */ + val eventHandler: EventHandler = OcsEventToGrpcResponseMapper(this) + val ocsGrpcService: OcsServiceGrpc.OcsServiceImplBase = OcsGrpcService(this) + + // + // Request Consumer functions + // + + override fun updateActivateResponse( + streamId: String, + activateResponse: StreamObserver) { + this.activateMsisdnClientMap[streamId] = activateResponse + } + + override fun deleteCreditControlClient(streamId: String) { + this.creditControlClientMap.remove(streamId) + } + + override fun creditControlRequestEvent( + streamId: String, + request: CreditControlRequestInfo) { + producer.injectCreditControlRequestIntoRingbuffer(streamId, request) + } + + override fun putCreditControlClient( + streamId: String, + creditControlAnswer: StreamObserver) { + creditControlClientMap[streamId] = creditControlAnswer + } + + // + // Response Producer functions + // + + override fun sendCreditControlAnswer( + streamId: String, + creditControlAnswer: CreditControlAnswerInfo) { + creditControlClientMap[streamId]?.onNext(creditControlAnswer) + } + + override fun activateOnNextResponse(response: ActivateResponse) { + // TODO martin: send activate MSISDN to selective ocsgw instead of all + this.activateMsisdnClientMap.forEach { _ , responseStream -> + responseStream.onNext(response) + } + } + + override fun returnUnusedDataBucketEvent( + msisdn: String, + reservedBucketBytes: Long) { + producer.releaseReservedDataBucketEvent( + msisdn, + reservedBucketBytes) + } +} diff --git a/ocs/src/main/kotlin/org/ostelco/prime/events/EventProcessor.kt b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/BundleBalanceStore.kt similarity index 53% rename from ocs/src/main/kotlin/org/ostelco/prime/events/EventProcessor.kt rename to ocs/src/main/kotlin/org/ostelco/prime/disruptor/BundleBalanceStore.kt index 13131a944..b44a4bc45 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/events/EventProcessor.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/BundleBalanceStore.kt @@ -1,12 +1,9 @@ -package org.ostelco.prime.events +package org.ostelco.prime.disruptor import com.lmax.disruptor.EventHandler -import org.ostelco.prime.disruptor.EventMessageType.CREDIT_CONTROL_REQUEST -import org.ostelco.prime.disruptor.EventMessageType.RELEASE_RESERVED_BUCKET -import org.ostelco.prime.disruptor.EventMessageType.REMOVE_MSISDN_TO_BUNDLE_MAPPING -import org.ostelco.prime.disruptor.EventMessageType.TOPUP_DATA_BUNDLE_BALANCE -import org.ostelco.prime.disruptor.OcsEvent -import org.ostelco.prime.logger +import org.ostelco.prime.disruptor.EventMessageType.ADD_MSISDN_TO_BUNDLE_MAPPING +import org.ostelco.prime.disruptor.EventMessageType.UPDATE_BUNDLE +import org.ostelco.prime.getLogger import org.ostelco.prime.model.Bundle import org.ostelco.prime.module.getResource import org.ostelco.prime.storage.ClientDataSource @@ -16,10 +13,9 @@ import org.ostelco.prime.storage.ClientDataSource * It will be initialized properly using `getResource()`. * Storage is parameterized into constructor to be able to pass mock for unit testing. 
*/ -class EventProcessor( - private val storage: ClientDataSource = getResource()) : EventHandler { +class BundleBalanceStore(private val storage: ClientDataSource = getResource()) : EventHandler { - private val logger by logger() + private val logger by getLogger() override fun onEvent( event: OcsEvent, @@ -27,10 +23,8 @@ class EventProcessor( endOfBatch: Boolean) { try { - if (event.messageType == CREDIT_CONTROL_REQUEST - || event.messageType == RELEASE_RESERVED_BUCKET - || event.messageType == TOPUP_DATA_BUNDLE_BALANCE - || event.messageType == REMOVE_MSISDN_TO_BUNDLE_MAPPING) { + if (event.messageType != UPDATE_BUNDLE && event.messageType != ADD_MSISDN_TO_BUNDLE_MAPPING) { + logger.info("Updating data bundle balance for bundleId : {} to {} bytes", event.bundleId, event.bundleBytes) val bundleId = event.bundleId diff --git a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/ClearingEventHandler.kt b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/ClearingEvent.kt similarity index 74% rename from ocs/src/main/kotlin/org/ostelco/prime/disruptor/ClearingEventHandler.kt rename to ocs/src/main/kotlin/org/ostelco/prime/disruptor/ClearingEvent.kt index 73e6c325b..1886f3e85 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/ClearingEventHandler.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/ClearingEvent.kt @@ -1,11 +1,11 @@ package org.ostelco.prime.disruptor import com.lmax.disruptor.EventHandler -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger -class ClearingEventHandler : EventHandler { +object ClearingEvent : EventHandler { - private val logger by logger() + private val logger by getLogger() override fun onEvent( event: OcsEvent, diff --git a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducer.kt b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducer.kt index a2e4880e8..29d424b0c 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducer.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducer.kt @@ -5,6 +5,7 @@ import org.ostelco.prime.model.Bundle interface EventProducer { fun topupDataBundleBalanceEvent( + requestId: String, bundleId: String, bytes: Long) @@ -13,8 +14,8 @@ interface EventProducer { bytes: Long) fun injectCreditControlRequestIntoRingbuffer( - request: CreditControlRequestInfo, - streamId: String) + streamId: String, + request: CreditControlRequestInfo) fun addBundle(bundle: Bundle) diff --git a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducerImpl.kt b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducerImpl.kt index 644d4fe70..88ff116cb 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducerImpl.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducerImpl.kt @@ -2,19 +2,18 @@ package org.ostelco.prime.disruptor import com.lmax.disruptor.RingBuffer import org.ostelco.ocs.api.CreditControlRequestInfo -import org.ostelco.ocs.api.ReportingReason import org.ostelco.prime.disruptor.EventMessageType.ADD_MSISDN_TO_BUNDLE_MAPPING import org.ostelco.prime.disruptor.EventMessageType.CREDIT_CONTROL_REQUEST import org.ostelco.prime.disruptor.EventMessageType.RELEASE_RESERVED_BUCKET import org.ostelco.prime.disruptor.EventMessageType.TOPUP_DATA_BUNDLE_BALANCE import org.ostelco.prime.disruptor.EventMessageType.UPDATE_BUNDLE -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger import org.ostelco.prime.model.Bundle import java.util.function.Consumer class EventProducerImpl(private val ringBuffer: RingBuffer) : EventProducer { - 
private val logger by logger() + private val logger by getLogger() private fun processNextEventOnTheRingBuffer(consumer: Consumer) { @@ -42,37 +41,39 @@ class EventProducerImpl(private val ringBuffer: RingBuffer) : EventPro } private fun injectIntoRingBuffer( - type: EventMessageType, + eventMessageType: EventMessageType, msisdn: String? = null, bundleId: String? = null, bundleBytes: Long = 0, reservedBytes: Long = 0, streamId: String? = null, request: CreditControlRequestInfo? = null, - topUpBytes: Long? = 0) { + topupContext: TopupContext? = null) { processNextEventOnTheRingBuffer( Consumer { event -> - event.update(type, - msisdn, - bundleId, - emptyList(), - bundleBytes, - reservedBytes, - streamId, - request, - topUpBytes) + event.update(messageType = eventMessageType, + msisdn = msisdn, + bundleId = bundleId, + bundleBytes = bundleBytes, + reservedBucketBytes = reservedBytes, + ocsgwStreamId = streamId, + request = request, + topupContext = topupContext) }) } override fun topupDataBundleBalanceEvent( + requestId: String, bundleId: String, bytes: Long) { injectIntoRingBuffer( - type = TOPUP_DATA_BUNDLE_BALANCE, + eventMessageType = TOPUP_DATA_BUNDLE_BALANCE, bundleId = bundleId, - topUpBytes = bytes) + topupContext = TopupContext( + requestId = requestId, + topUpBytes = bytes)) } override fun releaseReservedDataBucketEvent( @@ -80,26 +81,32 @@ class EventProducerImpl(private val ringBuffer: RingBuffer) : EventPro bytes: Long) { injectIntoRingBuffer( - type = RELEASE_RESERVED_BUCKET, + eventMessageType = RELEASE_RESERVED_BUCKET, msisdn = msisdn) } override fun injectCreditControlRequestIntoRingbuffer( - request: CreditControlRequestInfo, - streamId: String) { + streamId: String, + request: CreditControlRequestInfo) { - injectIntoRingBuffer(CREDIT_CONTROL_REQUEST, + injectIntoRingBuffer( + eventMessageType =CREDIT_CONTROL_REQUEST, msisdn = request.msisdn, - reservedBytes = 0, streamId = streamId, request = request) } override fun addBundle(bundle: Bundle) { - injectIntoRingBuffer(UPDATE_BUNDLE, bundleId = bundle.id, bundleBytes = bundle.balance) + injectIntoRingBuffer( + eventMessageType = UPDATE_BUNDLE, + bundleId = bundle.id, + bundleBytes = bundle.balance) } override fun addMsisdnToBundleMapping(msisdn: String, bundleId: String) { - injectIntoRingBuffer(ADD_MSISDN_TO_BUNDLE_MAPPING, msisdn = msisdn, bundleId = bundleId) + injectIntoRingBuffer( + eventMessageType = ADD_MSISDN_TO_BUNDLE_MAPPING, + msisdn = msisdn, + bundleId = bundleId) } } diff --git a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsDisruptor.kt b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsDisruptor.kt index cb5b3e869..d756dee3c 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsDisruptor.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsDisruptor.kt @@ -1,7 +1,6 @@ package org.ostelco.prime.disruptor import com.lmax.disruptor.EventFactory -import com.lmax.disruptor.TimeoutException import com.lmax.disruptor.dsl.Disruptor import io.dropwizard.lifecycle.Managed import java.util.concurrent.Executors @@ -28,7 +27,6 @@ class OcsDisruptor : Managed { disruptor.start() } - @Throws(TimeoutException::class) override fun stop() { disruptor.shutdown(TIMEOUT_IN_SECONDS.toLong(), TimeUnit.SECONDS) } diff --git a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt index 3ee85c00c..03c4816c1 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt 
@@ -1,7 +1,6 @@ package org.ostelco.prime.disruptor import org.ostelco.ocs.api.CreditControlRequestInfo -import org.ostelco.ocs.api.ReportingReason class OcsEvent { @@ -21,11 +20,6 @@ class OcsEvent { */ var bundleId: String? = null - /** - * Phone numbers linked to a bundle which is topped up. - */ - var msisdnToppedUp: List? = null - /** * Buckets that has been reserved from the bundle. */ @@ -45,45 +39,45 @@ class OcsEvent { /** * Credit-Control-Request from OCS */ - var request: CreditControlRequestInfo? = null; + var request: CreditControlRequestInfo? = null - /** - * Topup amount for bundle - */ - var topUpBytes: Long? = 0; + var topupContext: TopupContext? = null fun clear() { messageType = null msisdn = null bundleId = null - msisdnToppedUp = null bundleBytes = 0 reservedBucketBytes = 0 ocsgwStreamId = null request = null - topUpBytes = 0; + topupContext = null; } - //FIXME vihang: We need to think about roaming!!! + //FIXME vihang: We need to think about roaming fun update( messageType: EventMessageType?, msisdn: String?, bundleId: String?, - msisdnToppedUp: List, bundleBytes: Long, reservedBucketBytes: Long, ocsgwStreamId: String?, request: CreditControlRequestInfo?, - topUpBytes: Long?) { + topupContext: TopupContext?) { this.messageType = messageType this.msisdn = msisdn this.bundleId = bundleId - this.msisdnToppedUp = msisdnToppedUp this.bundleBytes = bundleBytes this.reservedBucketBytes = reservedBucketBytes this.ocsgwStreamId = ocsgwStreamId this.request = request - this.topUpBytes = topUpBytes + this.topupContext = topupContext } } + +class TopupContext( + val requestId: String, + val topUpBytes: Long, + var msisdnToppedUp: List = emptyList(), + var errorMessage: String = "") \ No newline at end of file diff --git a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEventFactory.kt b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEventFactory.kt deleted file mode 100644 index ccc22e082..000000000 --- a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEventFactory.kt +++ /dev/null @@ -1,10 +0,0 @@ -package org.ostelco.prime.disruptor - -import com.lmax.disruptor.EventFactory - -class OcsEventFactory : EventFactory { - - override fun newInstance(): OcsEvent { - return OcsEvent() - } -} diff --git a/ocs/src/main/kotlin/org/ostelco/prime/handler/PurchaseRequestHandler.kt b/ocs/src/main/kotlin/org/ostelco/prime/handler/PurchaseRequestHandler.kt index b751d7dcc..f4044b339 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/handler/PurchaseRequestHandler.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/handler/PurchaseRequestHandler.kt @@ -1,39 +1,89 @@ package org.ostelco.prime.handler -import arrow.core.getOrElse +import arrow.core.Either +import arrow.core.flatMap +import com.lmax.disruptor.EventHandler +import org.ostelco.prime.disruptor.EventMessageType.TOPUP_DATA_BUNDLE_BALANCE import org.ostelco.prime.disruptor.EventProducer -import org.ostelco.prime.logger +import org.ostelco.prime.disruptor.OcsEvent +import org.ostelco.prime.getLogger import org.ostelco.prime.module.getResource import org.ostelco.prime.storage.ClientGraphStore +import java.util.* +import java.util.concurrent.CompletableFuture +import java.util.concurrent.ConcurrentHashMap +import java.util.concurrent.TimeUnit.MILLISECONDS class PurchaseRequestHandler( private val producer: EventProducer, - private val storage: ClientGraphStore = getResource()) { + private val storage: ClientGraphStore = getResource()) : EventHandler { - private val logger by logger() + private val logger by getLogger() + + private val 
requestMap = ConcurrentHashMap>() fun handlePurchaseRequest( subscriberId: String, - productSku: String) { + productSku: String): Either { logger.info("Handling purchase request - subscriberId: {} sku = {}", subscriberId, productSku) // get Product by SKU - storage.getProduct(subscriberId, productSku) - .map { product -> - val noOfBytes = product.properties["noOfBytes"]?.replace("_", "")?.toLong() - - val bundleId = storage.getBundles(subscriberId).map { it?.first()?.id }.getOrElse { null } + return storage.getProduct(subscriberId, productSku) + // if left, map StoreError to String + .mapLeft { + "Unable to Topup. Not a valid SKU: $productSku. ${it.message}" + } + // map product to noOfBytes + .flatMap { product -> + val noOfBytes: Long? = product.properties["noOfBytes"] + ?.replace("_", "") + ?.toLong() + if (noOfBytes != null && noOfBytes > 0) { + Either.right(noOfBytes) + } else { + Either.left("Unable to Topup. No bytes to topup for product: $productSku") + } + } + // map noOfBytes to (noOfBytes, bundleId) + .flatMap { noOfBytes -> + storage.getBundles(subscriberId) + .mapLeft { "Unable to Topup. No bundles found for subscriberId: $subscriberId" } + .flatMap { bundles -> + bundles.firstOrNull() + ?.id + ?.let { Either.right(Pair(noOfBytes, it)) } + ?: Either.left("Unable to Topup. No bundles or invalid bundle found for subscriberId: $subscriberId") + } + } + .flatMap { (noOfBytes, bundleId) -> + logger.info("Handling topup product - bundleId: {} topup: {}", bundleId, noOfBytes) + topup(bundleId = bundleId, noOfBytes = noOfBytes) + } + } - if (bundleId != null && noOfBytes != null && noOfBytes > 0) { + override fun onEvent( + event: OcsEvent, + sequence: Long, + endOfBatch: Boolean) { - logger.info("Handling topup product - bundleId: {} topup: {}", bundleId, noOfBytes) + if (event.messageType == TOPUP_DATA_BUNDLE_BALANCE) { + val topupContext = event.topupContext + if (topupContext != null) { + requestMap[topupContext.requestId]?.complete(topupContext.errorMessage) + } + } + } - producer.topupDataBundleBalanceEvent(bundleId, noOfBytes) - } - }.mapLeft { - // TODO vihang: instead of throwing exception, return arrow.Either.left - throw Exception("Not a valid SKU: $productSku") - } + private fun topup(bundleId: String, noOfBytes: Long): Either { + val requestId = UUID.randomUUID().toString() + val future = CompletableFuture() + requestMap[requestId] = future + producer.topupDataBundleBalanceEvent(requestId = requestId, bundleId = bundleId, bytes = noOfBytes) + val error = future.get(5, MILLISECONDS) + if (error.isNotBlank()) { + return Either.left(error) + } + return Either.right(Unit) } } diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/ActivateResponseHolder.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/ActivateResponseHolder.kt deleted file mode 100644 index b0858568d..000000000 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/ActivateResponseHolder.kt +++ /dev/null @@ -1,47 +0,0 @@ -package org.ostelco.prime.ocs - -import io.grpc.stub.StreamObserver -import org.ostelco.ocs.api.ActivateResponse -import java.util.concurrent.locks.Lock -import java.util.concurrent.locks.ReentrantReadWriteLock - -/** - * Helper class to keep track of - * [<] - * instance in a threadsafe manner. - */ -internal class ActivateResponseHolder { - - private val readLock: Lock - - private val writeLock: Lock - - private var activateResponse: StreamObserver? 
= null - - init { - val readWriteLock = ReentrantReadWriteLock() - - this.readLock = readWriteLock.readLock() - this.writeLock = readWriteLock.writeLock() - } - - fun setActivateResponse(ar: StreamObserver) { - writeLock.lock() - try { - activateResponse = ar - } finally { - writeLock.unlock() - } - } - - fun onNextResponse(response: ActivateResponse) { - readLock.lock() - try { - if (activateResponse != null) { - activateResponse!!.onNext(response) - } - } finally { - readLock.unlock() - } - } -} diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt deleted file mode 100644 index a7f2ba282..000000000 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt +++ /dev/null @@ -1,121 +0,0 @@ -package org.ostelco.prime.ocs - -import com.lmax.disruptor.EventHandler -import org.ostelco.ocs.api.ActivateResponse -import org.ostelco.ocs.api.CreditControlAnswerInfo -import org.ostelco.ocs.api.FinalUnitAction -import org.ostelco.ocs.api.FinalUnitIndication -import org.ostelco.ocs.api.MultipleServiceCreditControl -import org.ostelco.ocs.api.ReportingReason -import org.ostelco.ocs.api.ServiceUnit -import org.ostelco.prime.disruptor.EventMessageType.CREDIT_CONTROL_REQUEST -import org.ostelco.prime.disruptor.EventMessageType.TOPUP_DATA_BUNDLE_BALANCE -import org.ostelco.prime.disruptor.OcsEvent -import org.ostelco.prime.logger - -/** - * An event handler, handling the [OcsEvent] messages that - * are used by the Disruptor execution mechanism to handle events. - */ -internal class EventHandlerImpl(private val ocsService: OcsService) : EventHandler { - - private val logger by logger() - - override fun onEvent( - event: OcsEvent, - sequence: Long, - endOfBatch: Boolean) { - - try { - dispatchOnEventType(event) - } catch (e: Exception) { - logger.warn("Exception handling prime event in OcsService", e) - } - } - - private fun dispatchOnEventType(event: OcsEvent) { - when (event.messageType) { - CREDIT_CONTROL_REQUEST -> handleCreditControlRequest(event) - TOPUP_DATA_BUNDLE_BALANCE -> handleTopupDataBundleBalance(event) - - else -> {} // do nothing - } - } - - private fun handleTopupDataBundleBalance(event: OcsEvent) { - event.msisdnToppedUp?.forEach { msisdn -> - val response = ActivateResponse.newBuilder().setMsisdn(msisdn).build() - ocsService.activateOnNextResponse(response) - } - } - - private fun logEventProcessing(msg: String, event: OcsEvent) { - val logString = """ - $msg - Msisdn: ${event.msisdn} - Requested bytes: ${event.request?.msccList?.firstOrNull()?.requested?.totalOctets ?: 0L} - Used bytes: ${event.request?.msccList?.firstOrNull()?.used?.totalOctets ?: 0L} - Bundle bytes: ${event.bundleBytes} - Topup bytes: ${event.topUpBytes} - Request id: ${event.request?.requestId} - """.trimIndent() - - logger.info(logString) - } - - private fun handleCreditControlRequest(event: OcsEvent) { - - logEventProcessing("Returning Credit-Control-Answer", event) - - // FIXME martin: This assume we only have one MSCC - // TODO martin: In case of zero balance we should add appropriate FinalUnitAction - - try { - val creditControlAnswer = CreditControlAnswerInfo.newBuilder() - .setMsisdn(event.msisdn) - - event.request?.let { request -> - if (request.msccCount > 0) { - val msccBuilder = MultipleServiceCreditControl.newBuilder() - msccBuilder.setServiceIdentifier(request.getMscc(0).serviceIdentifier) - .setRatingGroup(request.getMscc(0).ratingGroup) - .setValidityTime(86400) - - if ((request.getMscc(0).reportingReason != 
ReportingReason.FINAL) && (request.getMscc(0).requested.totalOctets > 0)) { - msccBuilder.granted = ServiceUnit.newBuilder() - .setTotalOctets(event.reservedBucketBytes) - .build() - if (event.reservedBucketBytes < request.getMscc(0).requested.totalOctets) { - msccBuilder.finalUnitIndication = FinalUnitIndication.newBuilder() - .setFinalUnitAction(FinalUnitAction.TERMINATE) - .setIsSet(true) - .build() - } - } else { - // Use -1 to indicate no granted service unit should be included in the answer - msccBuilder.granted = ServiceUnit.newBuilder() - .setTotalOctets(-1) - .build() - } - creditControlAnswer.addMscc(msccBuilder.build()) - } - creditControlAnswer.setRequestId(request.requestId) - } - - val streamId = event.ocsgwStreamId - if (streamId != null) { - ocsService.sendCreditControlAnswer(streamId, creditControlAnswer.build()) - } - } catch (e: Exception) { - logger.warn("Exception handling prime event", e) - logEventProcessing("Exception sending Credit-Control-Answer", event) - - // unable to send Credit-Control-Answer. - // So, return reserved bucket bytes back to data bundle. - ocsService.returnUnusedDataBucketEvent( - event.msisdn!!, // TODO vihang: need proper null check - event.reservedBucketBytes) - } - - } -} diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsModule.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsModule.kt index af54645ee..dea4238de 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsModule.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsModule.kt @@ -4,11 +4,13 @@ import com.fasterxml.jackson.annotation.JsonProperty import com.fasterxml.jackson.annotation.JsonTypeName import io.dropwizard.setup.Environment import org.hibernate.validator.constraints.NotEmpty -import org.ostelco.prime.analytics.DataConsumptionInfo -import org.ostelco.prime.disruptor.ClearingEventHandler +import org.ostelco.prime.analytics.AnalyticsReporter +import org.ostelco.prime.consumption.OcsGrpcServer +import org.ostelco.prime.consumption.OcsService +import org.ostelco.prime.disruptor.BundleBalanceStore +import org.ostelco.prime.disruptor.ClearingEvent import org.ostelco.prime.disruptor.EventProducerImpl import org.ostelco.prime.disruptor.OcsDisruptor -import org.ostelco.prime.events.EventProcessor import org.ostelco.prime.module.PrimeModule import org.ostelco.prime.thresholds.ThresholdChecker @@ -32,11 +34,7 @@ class OcsModule : PrimeModule { val ocsService = OcsService(producer) // OcsServer assigns OcsService as handler for gRPC requests - val server = OcsGrpcServer(8082, ocsService.asOcsServiceImplBase()) - - val dataConsumptionInfo = DataConsumptionInfo() - - val thresholdChecker = ThresholdChecker(config.lowBalanceThreshold) + val server = OcsGrpcServer(8082, ocsService.ocsGrpcService) // Events flow: // Producer:(OcsService, Subscriber) @@ -46,8 +44,12 @@ class OcsModule : PrimeModule { disruptor.disruptor .handleEventsWith(OcsState()) - .then(ocsService.asEventHandler(), EventProcessor(), thresholdChecker, dataConsumptionInfo) - .then(ClearingEventHandler()) + .then(ocsService.eventHandler, + BundleBalanceStore(), + OcsPrimeServiceSingleton.purchaseRequestHandler, + ThresholdChecker(config.lowBalanceThreshold), + AnalyticsReporter) + .then(ClearingEvent) // dropwizard starts disruptor env.lifecycle().manage(disruptor) diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsPrimeService.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsPrimeService.kt index 7db8fa881..d5353b6ad 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsPrimeService.kt +++ 
b/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsPrimeService.kt @@ -1,5 +1,6 @@ package org.ostelco.prime.ocs +import arrow.core.Either import org.ostelco.prime.disruptor.EventProducer import org.ostelco.prime.handler.OcsStateUpdateHandler import org.ostelco.prime.handler.PurchaseRequestHandler @@ -9,13 +10,13 @@ import org.ostelco.prime.storage.ClientDataSource /** * This class is using the singleton class as delegate. - * This is done because the {@link java.util.ServiceLoader} expects public no-args constructor, which is absent in Singleton. + * This is done because the [java.util.ServiceLoader] expects public no-args constructor, which is absent in Singleton. */ class OcsPrimeService : OcsSubscriberService by OcsPrimeServiceSingleton, OcsAdminService by OcsPrimeServiceSingleton object OcsPrimeServiceSingleton : OcsSubscriberService, OcsAdminService { - private lateinit var purchaseRequestHandler: PurchaseRequestHandler + lateinit var purchaseRequestHandler: PurchaseRequestHandler private lateinit var ocsStateUpdateHandler: OcsStateUpdateHandler private val storage by lazy { getResource() } @@ -25,8 +26,8 @@ object OcsPrimeServiceSingleton : OcsSubscriberService, OcsAdminService { ocsStateUpdateHandler = OcsStateUpdateHandler(producer) } - override fun topup(subscriberId: String, sku: String) { - purchaseRequestHandler.handlePurchaseRequest(subscriberId, sku) + override fun topup(subscriberId: String, sku: String): Either { + return purchaseRequestHandler.handlePurchaseRequest(subscriberId, sku) } override fun addBundle(bundle: Bundle) { diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsService.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsService.kt deleted file mode 100644 index b5f20e01e..000000000 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsService.kt +++ /dev/null @@ -1,102 +0,0 @@ -package org.ostelco.prime.ocs - -import com.lmax.disruptor.EventHandler -import io.grpc.stub.StreamObserver -import org.ostelco.ocs.api.ActivateResponse -import org.ostelco.ocs.api.CreditControlAnswerInfo -import org.ostelco.ocs.api.CreditControlRequestInfo -import org.ostelco.ocs.api.OcsServiceGrpc -import org.ostelco.prime.disruptor.EventProducer -import org.ostelco.prime.disruptor.OcsEvent -import java.util.concurrent.ConcurrentHashMap -import java.util.concurrent.ConcurrentMap - -class OcsService(private val producer: EventProducer) { - - private val creditControlClientMap: ConcurrentMap> - - /** - * A holder for - * [<] - * instances that are somehow used - */ - private val activateResponseHolder: ActivateResponseHolder - - private val eventHandler: EventHandler - - private val ocsServerImplBaseImpl: OcsServiceGrpc.OcsServiceImplBase - - init { - this.creditControlClientMap = ConcurrentHashMap() - this.eventHandler = EventHandlerImpl(this) - this.ocsServerImplBaseImpl = OcsGrpcService(this) - this.activateResponseHolder = ActivateResponseHolder() - } - - fun asEventHandler(): EventHandler { - return eventHandler - } - - fun returnUnusedDataBucketEvent( - msisdn: String, - bucketBytes: Long) { - producer.releaseReservedDataBucketEvent( - msisdn, - bucketBytes) - } - - /** - * Return a service that can be used to serve incoming GRPC requests. The service - * is typically bound to a service port using the GRPC ServerBuilder mechanism - * provide by GRPC: - * ` - * server = ServerBuilder. - * forPort(port). - * addService(service). 
- * build(); - ` * - * - * @return The service that can receive incoming GRPC messages - */ - fun asOcsServiceImplBase(): OcsServiceGrpc.OcsServiceImplBase { - return this.ocsServerImplBaseImpl - } - - private fun getCreditControlClientForStream( - streamId: String): StreamObserver? { - // Here we need to Convert it back to an answer. - creditControlClientMap[streamId] - return creditControlClientMap[streamId] - } - - fun activateOnNextResponse(response: ActivateResponse) { - this.activateResponseHolder.onNextResponse(response) - } - - fun updateActivateResponse( - activateResponse: StreamObserver) { - this.activateResponseHolder.setActivateResponse(activateResponse) - } - - fun deleteCreditControlClient(streamId: String) { - this.creditControlClientMap.remove(streamId) - } - - fun creditControlRequestEvent( - request: CreditControlRequestInfo, - streamId: String) { - producer.injectCreditControlRequestIntoRingbuffer(request, streamId) - } - - fun putCreditControlClient( - streamId: String, - creditControlAnswer: StreamObserver) { - creditControlClientMap[streamId] = creditControlAnswer - } - - fun sendCreditControlAnswer(streamId: String, creditControlAnswerInfo: CreditControlAnswerInfo) { - val creditControlAnswer = getCreditControlClientForStream(streamId) - - creditControlAnswer?.onNext(creditControlAnswerInfo) - } -} diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt index 6e03b6820..c387aea9e 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt @@ -9,7 +9,7 @@ import org.ostelco.prime.disruptor.EventMessageType.REMOVE_MSISDN_TO_BUNDLE_MAPP import org.ostelco.prime.disruptor.EventMessageType.TOPUP_DATA_BUNDLE_BALANCE import org.ostelco.prime.disruptor.EventMessageType.UPDATE_BUNDLE import org.ostelco.prime.disruptor.OcsEvent -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger import org.ostelco.prime.module.getResource import org.ostelco.prime.storage.AdminDataSource import java.util.* @@ -19,7 +19,7 @@ import java.util.* */ class OcsState(val loadSubscriberInfo:Boolean = true) : EventHandler { - private val logger by logger() + private val logger by getLogger() // this is public for prime:integration tests val msisdnToBundleIdMap = HashMap() @@ -28,6 +28,12 @@ class OcsState(val loadSubscriberInfo:Boolean = true) : EventHandler { private val bundleBalanceMap = HashMap() private val bucketReservedMap = HashMap() + init { + if (loadSubscriberInfo) { + loadDatabaseToInMemoryStructure() + } + } + override fun onEvent( event: OcsEvent, sequence: Long, @@ -41,21 +47,30 @@ class OcsState(val loadSubscriberInfo:Boolean = true) : EventHandler { logger.error("Received null as msisdn") return } - consumeDataBytes(msisdn, event.request?.msccList?.firstOrNull()?.used?.totalOctets ?: 0L) + consumeDataBytes( + msisdn = msisdn, + usedBytes = event.request?.msccList?.firstOrNull()?.used?.totalOctets ?: 0L) event.reservedBucketBytes = reserveDataBytes( - msisdn, - event.request?.msccList?.firstOrNull()?.requested?.totalOctets ?: 0L) + msisdn = msisdn, + bytes = event.request?.msccList?.firstOrNull()?.requested?.totalOctets ?: 0L) event.bundleId = msisdnToBundleIdMap[msisdn] event.bundleBytes = bundleBalanceMap[event.bundleId] ?: 0 } TOPUP_DATA_BUNDLE_BALANCE -> { - val bundleId = event.bundleId - if (bundleId == null) { - logger.error("Received null as bundleId") - return + try { + val bundleId = event.bundleId + if (bundleId == null) { + 
logger.error("Received null as bundleId") + event.topupContext?.errorMessage = "Received null as bundleId" + return + } + event.bundleBytes = addDataBundleBytes( + bundleId = bundleId, + bytes = event.topupContext?.topUpBytes ?: 0L) + event.topupContext?.msisdnToppedUp = bundleIdToMsisdnMap[bundleId]?.toList() ?: emptyList() + } catch (e: Exception) { + event.topupContext?.errorMessage = e.message ?: "Failed to perform topup" } - event.bundleBytes = addDataBundleBytes(bundleId, event.topUpBytes ?: 0L) - event.msisdnToppedUp = bundleIdToMsisdnMap[bundleId]?.toList() } RELEASE_RESERVED_BUCKET -> { val msisdn = event.msisdn @@ -124,14 +139,10 @@ class OcsState(val loadSubscriberInfo:Boolean = true) : EventHandler { return bundleBalanceMap[bundleId] ?: 0 } - /** + /* * Add to subscriber's data bundle balance in bytes. * This is called when subscriber top ups. or, P-GW returns * unused data after subscriber disconnects data. - * - * @param bundleId Bundle ID - * @param bytes Number of bytes we want to add - * @return bytes data bundle balance in bytes */ private fun addDataBundleBytes(bundleId: String, bytes: Long): Long { @@ -139,7 +150,7 @@ class OcsState(val loadSubscriberInfo:Boolean = true) : EventHandler { "Number of bytes must be positive") bundleBalanceMap.putIfAbsent(bundleId, 0L) - val newDataSize = bundleBalanceMap[bundleId]!! + bytes + val newDataSize = (bundleBalanceMap[bundleId] ?: 0) + bytes bundleBalanceMap[bundleId] = newDataSize return newDataSize } @@ -274,12 +285,6 @@ class OcsState(val loadSubscriberInfo:Boolean = true) : EventHandler { return consumed } - init { - if (loadSubscriberInfo) { - loadDatabaseToInMemoryStructure() - } - } - private fun loadDatabaseToInMemoryStructure() { logger.info("Loading initial balance from storage to in-memory OcsState") diff --git a/ocs/src/main/kotlin/org/ostelco/prime/thresholds/ThresholdChecker.kt b/ocs/src/main/kotlin/org/ostelco/prime/thresholds/ThresholdChecker.kt index af1c19b60..a04068427 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/thresholds/ThresholdChecker.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/thresholds/ThresholdChecker.kt @@ -2,9 +2,8 @@ package org.ostelco.prime.thresholds import com.lmax.disruptor.EventHandler import org.ostelco.prime.appnotifier.AppNotifier -import org.ostelco.prime.disruptor.OcsEvent import org.ostelco.prime.disruptor.EventMessageType.CREDIT_CONTROL_REQUEST -import org.ostelco.prime.logger +import org.ostelco.prime.disruptor.OcsEvent import org.ostelco.prime.module.getResource /** @@ -12,10 +11,7 @@ import org.ostelco.prime.module.getResource */ class ThresholdChecker(private val lowBalanceThreshold: Long) : EventHandler { - private val logger by logger() - private val appNotifier by lazy { getResource() } - // private val appNotifier by getResource() override fun onEvent( event: OcsEvent, diff --git a/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt b/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt index 63f04a32e..1471d5196 100644 --- a/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt +++ b/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt @@ -1,5 +1,6 @@ package org.ostelco.prime.disruptor +import com.lmax.disruptor.EventFactory import com.lmax.disruptor.EventHandler import com.lmax.disruptor.dsl.Disruptor import junit.framework.TestCase.assertEquals @@ -20,21 +21,20 @@ import java.util.concurrent.TimeUnit class PrimeEventProducerTest { - private var primeEventProducer: EventProducerImpl? 
= null + private lateinit var primeEventProducer: EventProducerImpl private var disruptor: Disruptor? = null - private var countDownLatch: CountDownLatch? = null + private lateinit var countDownLatch: CountDownLatch - private var result: MutableSet? = null + private lateinit var result: MutableSet private// Wait a short while for the thing to process. val collectedEvent: OcsEvent - @Throws(InterruptedException::class) get() { - assertTrue(countDownLatch!!.await(TIMEOUT.toLong(), TimeUnit.SECONDS)) - assertFalse(result!!.isEmpty()) - val event = result!!.iterator().next() + assertTrue(countDownLatch.await(TIMEOUT.toLong(), TimeUnit.SECONDS)) + assertFalse(result.isEmpty()) + val event = result.iterator().next() assertNotNull(event) return event } @@ -42,22 +42,22 @@ class PrimeEventProducerTest { @Before fun setUp() { - this.disruptor = Disruptor( - OcsEventFactory(), + this.disruptor = Disruptor( + EventFactory { OcsEvent() }, RING_BUFFER_SIZE, Executors.defaultThreadFactory()) - val ringBuffer = disruptor!!.ringBuffer + val ringBuffer = disruptor?.ringBuffer ?: throw Exception("Failed to init disruptor") this.primeEventProducer = EventProducerImpl(ringBuffer) this.countDownLatch = CountDownLatch(1) this.result = HashSet() val eh = EventHandler { event, _, _ -> - result!!.add(event) - countDownLatch?.countDown() + result.add(event) + countDownLatch.countDown() } - disruptor!!.handleEventsWith(eh) - disruptor!!.start() + disruptor?.handleEventsWith(eh) + disruptor?.start() } @After @@ -66,23 +66,24 @@ class PrimeEventProducerTest { } @Test - @Throws(Exception::class) fun topupDataBundleBalanceEvent() { // Stimulating a response - primeEventProducer!!.topupDataBundleBalanceEvent(BUNDLE_ID, NO_OF_TOPUP_BYTES) + primeEventProducer.topupDataBundleBalanceEvent( + requestId = TOPUP_REQUEST_ID, + bundleId = BUNDLE_ID, + bytes = NO_OF_TOPUP_BYTES) // Collect an event (or fail trying). 
val event = collectedEvent // Verify some behavior assertEquals(BUNDLE_ID, event.bundleId) - assertEquals(NO_OF_TOPUP_BYTES, event.topUpBytes) + assertEquals(NO_OF_TOPUP_BYTES, event.topupContext?.topUpBytes) assertEquals(TOPUP_DATA_BUNDLE_BALANCE, event.messageType) } @Test - @Throws(Exception::class) fun creditControlRequestEvent() { val request = CreditControlRequestInfo.newBuilder().setMsisdn(MSISDN).addMscc(MultipleServiceCreditControl.newBuilder() .setRequested(ServiceUnit.newBuilder() @@ -94,7 +95,7 @@ class PrimeEventProducerTest { .build() ).build() - primeEventProducer!!.injectCreditControlRequestIntoRingbuffer(request, STREAM_ID) + primeEventProducer.injectCreditControlRequestIntoRingbuffer(STREAM_ID, request) val event = collectedEvent assertEquals(MSISDN, event.msisdn) @@ -127,6 +128,8 @@ class PrimeEventProducerTest { private const val RATING_GROUP = 10L; private const val SERVICE_IDENTIFIER = 1L; + + private const val TOPUP_REQUEST_ID = "req-id" } } diff --git a/ocs/src/test/kotlin/org/ostelco/prime/event/EventProcessorTest.kt b/ocs/src/test/kotlin/org/ostelco/prime/event/EventProcessorTest.kt index 633c7031f..c9ad87496 100644 --- a/ocs/src/test/kotlin/org/ostelco/prime/event/EventProcessorTest.kt +++ b/ocs/src/test/kotlin/org/ostelco/prime/event/EventProcessorTest.kt @@ -11,7 +11,7 @@ import org.mockito.junit.MockitoJUnit import org.mockito.junit.MockitoRule import org.ostelco.prime.disruptor.EventMessageType.RELEASE_RESERVED_BUCKET import org.ostelco.prime.disruptor.OcsEvent -import org.ostelco.prime.events.EventProcessor +import org.ostelco.prime.disruptor.BundleBalanceStore import org.ostelco.prime.model.Bundle import org.ostelco.prime.model.Product import org.ostelco.prime.storage.ClientDataSource @@ -30,7 +30,7 @@ class EventProcessorTest { @Mock lateinit var storage: ClientDataSource - private lateinit var processor: EventProcessor + private lateinit var processor: BundleBalanceStore @Before fun setUp() { @@ -38,7 +38,7 @@ class EventProcessorTest { Mockito.`when`>(storage.getProduct("id", Products.DATA_TOPUP_3GB.sku)) .thenReturn(Either.right(Products.DATA_TOPUP_3GB)) - this.processor = EventProcessor(storage) + this.processor = BundleBalanceStore(storage) } @Test diff --git a/ocs/src/test/kotlin/org/ostelco/prime/handler/PurchaseRequestHandlerTest.kt b/ocs/src/test/kotlin/org/ostelco/prime/handler/PurchaseRequestHandlerTest.kt index 837bfe450..b39a18150 100644 --- a/ocs/src/test/kotlin/org/ostelco/prime/handler/PurchaseRequestHandlerTest.kt +++ b/ocs/src/test/kotlin/org/ostelco/prime/handler/PurchaseRequestHandlerTest.kt @@ -61,7 +61,10 @@ class PurchaseRequestHandlerTest { assertEquals(DATA_TOPUP_3GB, capturedPurchaseRecord.value.product) - verify(producer).topupDataBundleBalanceEvent(BUNDLE_ID, topupBytes) + verify(producer).topupDataBundleBalanceEvent( + requestId = TOPUP_REQUEST_ID, + bundleId = BUNDLE_ID, + bytes = topupBytes) } companion object { @@ -69,7 +72,7 @@ class PurchaseRequestHandlerTest { private const val MSISDN = "12345678" private const val SUBSCRIBER_ID = "foo@bar.com" private const val BUNDLE_ID = "foo@bar.com" - + private const val TOPUP_REQUEST_ID = "req-id" } // https://github.com/mockito/mockito/issues/1255 diff --git a/ocs/src/test/kotlin/org/ostelco/prime/ocs/OcsServiceTest.kt b/ocs/src/test/kotlin/org/ostelco/prime/ocs/OcsServiceTest.kt index d802af786..f33cc8945 100644 --- a/ocs/src/test/kotlin/org/ostelco/prime/ocs/OcsServiceTest.kt +++ b/ocs/src/test/kotlin/org/ostelco/prime/ocs/OcsServiceTest.kt @@ -1,5 +1,6 @@ package 
org.ostelco.prime.ocs +import com.lmax.disruptor.EventFactory import com.lmax.disruptor.EventHandler import com.lmax.disruptor.dsl.Disruptor import org.junit.After @@ -8,9 +9,9 @@ import org.junit.Assert.assertNotNull import org.junit.Assert.assertTrue import org.junit.Before import org.junit.Test -import org.ostelco.prime.disruptor.OcsEvent -import org.ostelco.prime.disruptor.OcsEventFactory +import org.ostelco.prime.consumption.OcsService import org.ostelco.prime.disruptor.EventProducerImpl +import org.ostelco.prime.disruptor.OcsEvent import java.util.* import java.util.concurrent.CountDownLatch import java.util.concurrent.Executors @@ -28,7 +29,6 @@ class OcsServiceTest { private// Wait wait a short while for the thing to process. val collectedEvent: OcsEvent? - @Throws(InterruptedException::class) get() { assertTrue(countDownLatch?.await(TIMEOUT, TimeUnit.SECONDS) ?: false) assertFalse(result?.isEmpty() ?: true) @@ -40,8 +40,8 @@ class OcsServiceTest { @Before fun setUp() { - val disruptor = Disruptor( - OcsEventFactory(), + val disruptor = Disruptor( + EventFactory { OcsEvent() }, RING_BUFFER_SIZE, Executors.defaultThreadFactory()) diff --git a/ocsgw/Dockerfile b/ocsgw/Dockerfile index 7e78d311a..2e2ffd740 100644 --- a/ocsgw/Dockerfile +++ b/ocsgw/Dockerfile @@ -1,4 +1,4 @@ -FROM openjdk:8u171 +FROM azul/zulu-openjdk:8u181-8.31.0.1 MAINTAINER CSI "csi@telenordigital.com" diff --git a/ocsgw/build.gradle b/ocsgw/build.gradle index 0ce19f6f4..75bd6fd43 100644 --- a/ocsgw/build.gradle +++ b/ocsgw/build.gradle @@ -3,7 +3,7 @@ plugins { id "com.github.johnrengelman.shadow" version "2.0.4" } -ext.junit5Version = "5.3.0" +ext.junit5Version = "5.3.1" dependencies { implementation project(':ocs-grpc-api') @@ -15,7 +15,7 @@ dependencies { implementation 'ch.qos.logback:logback-classic:1.2.3' // log to gcp stack-driver - implementation 'com.google.cloud:google-cloud-logging-logback:0.61.0-alpha' + implementation 'com.google.cloud:google-cloud-logging-logback:0.63.0-alpha' testImplementation project(':diameter-test') testImplementation "org.junit.jupiter:junit-jupiter-api:$junit5Version" diff --git a/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/GrpcDataSource.java b/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/GrpcDataSource.java index 1b615aa1b..2b7362b99 100644 --- a/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/GrpcDataSource.java +++ b/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/GrpcDataSource.java @@ -382,13 +382,13 @@ public void handleRequest(final CreditControlContext context) { if (psInformation != null && psInformation.getCalledStationId() != null - && psInformation.getSgsnMncMcc() != null) { + && psInformation.getSgsnMccMnc() != null) { builder.setServiceInformation( ServiceInfo.newBuilder() .setPsInformation(PsInformation.newBuilder() .setCalledStationId(psInformation.getCalledStationId()) - .setSgsnMccMnc(psInformation.getSgsnMncMcc()) + .setSgsnMccMnc(psInformation.getSgsnMccMnc()) .build()).build()); } } diff --git a/ocsgw/src/test/resources/logback.xml b/ocsgw/src/test/resources/logback.xml index cc76efb4e..01c8f6a8a 100644 --- a/ocsgw/src/test/resources/logback.xml +++ b/ocsgw/src/test/resources/logback.xml @@ -3,10 +3,11 @@ - %d{dd MMM yyyy HH:mm:ss,SSS} %-4r [%t] %-5p %c %x - %m%n + %d{dd MMM yyyy HH:mm:ss,SSS} %-5p %c{1} - %m%n + diff --git a/ostelco-lib/build.gradle b/ostelco-lib/build.gradle index 3c54eb8da..a06c315b2 100644 --- a/ostelco-lib/build.gradle +++ b/ostelco-lib/build.gradle @@ -7,12 +7,12 @@ dependencies { implementation 
"io.dropwizard:dropwizard-core:$dropwizardVersion" implementation "io.dropwizard:dropwizard-auth:$dropwizardVersion" // Match netty via ocs-api - implementation 'com.google.firebase:firebase-admin:6.4.0' + implementation "com.google.firebase:firebase-admin:$firebaseVersion" implementation 'com.lmax:disruptor:3.4.2' implementation "com.google.guava:guava:$guavaVersion" testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" - testImplementation "org.mockito:mockito-core:2.18.3" + testImplementation "org.mockito:mockito-core:$mockitoVersion" testImplementation "org.assertj:assertj-core:$assertJVersion" // https://mvnrepository.com/artifact/org.glassfish.jersey.test-framework.providers/jersey-test-framework-provider-grizzly2 @@ -23,7 +23,7 @@ dependencies { } // https://mvnrepository.com/artifact/org.jetbrains/annotations - implementation 'org.jetbrains:annotations:16.0.2' + implementation 'org.jetbrains:annotations:16.0.3' } lombok { diff --git a/payment-processor/README.md b/payment-processor/README.md index 80cd37d4f..bfb7633e8 100644 --- a/payment-processor/README.md +++ b/payment-processor/README.md @@ -1,2 +1,4 @@ +# Module Payment Processor + Placeholder for documentation for the payment processor diff --git a/payment-processor/build.gradle b/payment-processor/build.gradle index 5d018028a..e47629b26 100644 --- a/payment-processor/build.gradle +++ b/payment-processor/build.gradle @@ -1,5 +1,5 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "java-library" id "idea" } @@ -19,7 +19,7 @@ sourceSets { dependencies { implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion" - implementation project(":prime-api") + implementation project(":prime-modules") implementation "com.stripe:stripe-java:$stripeVersion" @@ -42,9 +42,7 @@ task integration(type: Test, description: 'Runs the integration tests.', group: classpath = sourceSets.integration.runtimeClasspath } -if (System.getenv("BUILD_ENV") != "CI_CD") { - build.dependsOn integration -} +build.dependsOn integration tasks.withType(org.jetbrains.kotlin.gradle.tasks.KotlinCompile).all { kotlinOptions { diff --git a/payment-processor/src/integration-tests/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessorTest.kt b/payment-processor/src/integration-tests/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessorTest.kt index cea785c71..6b0d8aecc 100644 --- a/payment-processor/src/integration-tests/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessorTest.kt +++ b/payment-processor/src/integration-tests/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessorTest.kt @@ -55,6 +55,19 @@ class StripePaymentProcessorTest { assertEquals(true, result.isLeft()) } + @Test + fun getPaymentProfile() { + val result = paymentProcessor.getPaymentProfile(testCustomer) + assertEquals(true, result.isRight()) + assertEquals(stripeCustomerId, result.fold({""}, {it.id})) + } + + @Test + fun getUnknownPaymentProfile() { + val result = paymentProcessor.getPaymentProfile("not@fail.com") + assertEquals(false, result.isRight()) + } + @Test fun addSourceToCustomerAndRemove() { @@ -73,6 +86,26 @@ class StripePaymentProcessorTest { assertEquals(true, resultDeleteSource.isRight()) } + @Test + fun addSourceToCustomerTwise() { + val resultAddSource = paymentProcessor.addSource(stripeCustomerId, createPaymentSourceId()) + + val resultStoredSources = paymentProcessor.getSavedSources(stripeCustomerId) + assertEquals(1, resultStoredSources.fold({ 0 
}, { it.size })) + + resultAddSource.map { addedSource -> + resultStoredSources.map { storedSources -> + assertEquals(addedSource.id, storedSources.first().id) + }.mapLeft { fail() } + }.mapLeft { fail() } + + val resultAddSecondSource = paymentProcessor.addSource(stripeCustomerId, resultStoredSources.fold({ "" }, { it.first().id })) + assertEquals(true, resultAddSecondSource.isLeft()) + + val resultDeleteSource = paymentProcessor.removeSource(stripeCustomerId, resultAddSource.fold({ "" }, { it.id })) + assertEquals(true, resultDeleteSource.isRight()) + } + @Test fun addDefaultSourceAndRemove() { diff --git a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessorModule.kt b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessorModule.kt index ebb187ba4..a30544fd9 100644 --- a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessorModule.kt +++ b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessorModule.kt @@ -3,13 +3,13 @@ package org.ostelco.prime.paymentprocessor import com.fasterxml.jackson.annotation.JsonTypeName import com.stripe.Stripe import io.dropwizard.setup.Environment -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger import org.ostelco.prime.module.PrimeModule @JsonTypeName("stripe-payment-processor") class PaymentProcessorModule : PrimeModule { - private val logger by logger() + private val logger by getLogger() override fun init(env: Environment) { logger.info("PaymentProcessor init with $env") diff --git a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt index ca34b6aef..6b6f980ca 100644 --- a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt +++ b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt @@ -2,64 +2,92 @@ package org.ostelco.prime.paymentprocessor import arrow.core.Either import arrow.core.flatMap -import org.ostelco.prime.logger +import com.stripe.exception.* +import org.ostelco.prime.getLogger import com.stripe.model.* import org.ostelco.prime.paymentprocessor.core.* - +import com.stripe.model.Customer class StripePaymentProcessor : PaymentProcessor { - private val logger by logger() + private val logger by getLogger() - override fun getSavedSources(customerId: String): Either> = - either(NotFoundError("Failed to retrieve sources for customer $customerId")) { - val sources = mutableListOf() + override fun getSavedSources(customerId: String): Either> = + either("Failed to retrieve sources for customer $customerId") { + val sources = mutableListOf() val customer = Customer.retrieve(customerId) customer.sources.data.forEach { - sources.add(SourceInfo(it.id, getAccountDetails(it))) + val details = getAccountDetails(it) + sources.add(SourceDetailsInfo(it.id, getAccountType(details), details)) } sources } + private fun getAccountType(details: Map) : String { + return details.get("type").toString() + } + /* Returns detailed 'account details' for the given Stripe source/account. - Note that the fields 'id' and 'accountType' are manadatory. */ + Note that including the fields 'id' and 'type' are manadatory. 
*/ private fun getAccountDetails(accountInfo: ExternalAccount) : Map { when (accountInfo) { is Card -> { return mapOf("id" to accountInfo.id, - "accountType" to "card", + "type" to "card", "addressLine1" to accountInfo.addressLine1, "addressLine2" to accountInfo.addressLine2, - "zip" to accountInfo.addressZip, - "city" to accountInfo.addressCity, - "state" to accountInfo.addressState, + "addressZip" to accountInfo.addressZip, + "addressCity" to accountInfo.addressCity, + "addressState" to accountInfo.addressState, + "brand" to accountInfo.brand, // "Visa", "Mastercard" etc. "country" to accountInfo.country, "currency" to accountInfo.currency, - "brand" to accountInfo.brand, // "Visa", "Mastercard" etc. - "last4" to accountInfo.last4, - "expireMonth" to accountInfo.expMonth, - "expireYear" to accountInfo.expYear, - "funding" to accountInfo.funding) // Typ.: "credit" or "debit" - .filterValues { it != null } // Unfortunately the 'swagger' def. will removed fields back again. - } - // To add support for other Stripe source/account types, see - // https://stripe.com/docs/api/java#sources + "cvcCheck" to accountInfo.cvcCheck, + "expMonth" to accountInfo.expMonth, + "expYear" to accountInfo.expYear, + "fingerprint" to accountInfo.fingerprint, + "funding" to accountInfo.funding, + "last4" to accountInfo.last4, // Typ.: "credit" or "debit" + "threeDSecure" to accountInfo.threeDSecure) + .filterValues { it != null } + } + is Source -> { + return mapOf("id" to accountInfo.id, + "type" to "source", + "typeData" to accountInfo.typeData, + "owner" to accountInfo.owner) + } else -> { - logger.error("Received unsupported Stripe source/account type: {}", accountInfo) + logger.error("Received unsupported Stripe source/account type: {}", + accountInfo) return mapOf("id" to accountInfo.id, - "accountType" to "unsupported") + "type" to "unsupported") } } } override fun createPaymentProfile(userEmail: String): Either = - either(ForbiddenError("Failed to create profile for user $userEmail")) { + either("Failed to create profile for user $userEmail") { val customerParams = mapOf("email" to userEmail) ProfileInfo(Customer.create(customerParams).id) } + override fun getPaymentProfile(userEmail: String): Either { + val customerParams = mapOf( + "limit" to "1", + "email" to userEmail) + val customerList = Customer.list(customerParams) + if (customerList.data.isEmpty()) { + return Either.left(NotFoundError("Could not find a payment profile for user $userEmail")) + } else if (customerList.data.size > 1){ + return Either.left(NotFoundError("Multiple profiles for user $userEmail found")) + } else { + return Either.right(ProfileInfo(customerList.data.first().id)) + } + } + override fun createPlan(productId: String, amount: Int, currency: String, interval: PaymentProcessor.Interval): Either = - either(ForbiddenError("Failed to create plan with product id $productId amount $amount currency $currency interval ${interval.value}")) { + either("Failed to create plan with product id $productId amount $amount currency $currency interval ${interval.value}") { val planParams = mapOf( "amount" to amount, "interval" to interval.value, @@ -69,13 +97,13 @@ class StripePaymentProcessor : PaymentProcessor { } override fun removePlan(planId: String): Either = - either(NotFoundError("Failed to delete plan $planId")) { + either("Failed to delete plan $planId") { val plan = Plan.retrieve(planId) PlanInfo(plan.delete().id) } override fun createProduct(sku: String): Either = - either(ForbiddenError("Failed to create product with sku $sku")) { 
+ either("Failed to create product with sku $sku") { val productParams = mapOf( "name" to sku, "type" to "service") @@ -83,20 +111,20 @@ class StripePaymentProcessor : PaymentProcessor { } override fun removeProduct(productId: String): Either = - either(NotFoundError("Failed to delete product $productId")) { + either("Failed to delete product $productId") { val product = Product.retrieve(productId) ProductInfo(product.delete().id) } override fun addSource(customerId: String, sourceId: String): Either = - either(NotFoundError("Failed to add source $sourceId to customer $customerId")) { + either("Failed to add source $sourceId to customer $customerId") { val customer = Customer.retrieve(customerId) val params = mapOf("source" to sourceId) SourceInfo(customer.sources.create(params).id) } override fun setDefaultSource(customerId: String, sourceId: String): Either = - either(NotFoundError("Failed to set default source $sourceId for customer $customerId")) { + either("Failed to set default source $sourceId for customer $customerId") { val customer = Customer.retrieve(customerId) val updateParams = mapOf("default_source" to sourceId) val customerUpdated = customer.update(updateParams) @@ -104,18 +132,18 @@ class StripePaymentProcessor : PaymentProcessor { } override fun getDefaultSource(customerId: String): Either = - either(NotFoundError("Failed to get default source for customer $customerId")) { + either("Failed to get default source for customer $customerId") { SourceInfo(Customer.retrieve(customerId).defaultSource) } override fun deletePaymentProfile(customerId: String): Either = - either(NotFoundError("Failed to delete customer $customerId")) { + either("Failed to delete customer $customerId") { val customer = Customer.retrieve(customerId) ProfileInfo(customer.delete().id) } override fun subscribeToPlan(planId: String, customerId: String): Either = - either(ForbiddenError("Failed to subscribe customer $customerId to plan $planId")) { + either("Failed to subscribe customer $customerId to plan $planId") { val item = mapOf("plan" to planId) val params = mapOf( "customer" to customerId, @@ -125,7 +153,7 @@ class StripePaymentProcessor : PaymentProcessor { } override fun cancelSubscription(subscriptionId: String, atIntervalEnd: Boolean): Either = - either(NotFoundError("Failed to unsubscribe subscription Id : $subscriptionId atIntervalEnd $atIntervalEnd")) { + either("Failed to unsubscribe subscription Id : $subscriptionId atIntervalEnd $atIntervalEnd") { val subscription = Subscription.retrieve(subscriptionId) val subscriptionParams = mapOf("at_period_end" to atIntervalEnd) SubscriptionInfo(subscription.cancel(subscriptionParams).id) @@ -134,7 +162,7 @@ class StripePaymentProcessor : PaymentProcessor { override fun authorizeCharge(customerId: String, sourceId: String?, amount: Int, currency: String): Either { val errorMessage = "Failed to authorize the charge for customerId $customerId sourceId $sourceId amount $amount currency $currency" - return either(ForbiddenError(errorMessage)) { + return either(errorMessage) { val chargeParams = mutableMapOf( "amount" to amount, "currency" to currency, @@ -156,7 +184,7 @@ class StripePaymentProcessor : PaymentProcessor { override fun captureCharge(chargeId: String, customerId: String): Either { val errorMessage = "Failed to capture charge for customerId $customerId chargeId $chargeId" - return either(NotFoundError(errorMessage)) { + return either(errorMessage) { Charge.retrieve(chargeId) }.flatMap { charge: Charge -> val review = charge.review @@ -177,23 
+205,48 @@ class StripePaymentProcessor : PaymentProcessor { } override fun refundCharge(chargeId: String): Either = - either(NotFoundError("Failed to refund charge $chargeId")) { + either("Failed to refund charge $chargeId") { val refundParams = mapOf("charge" to chargeId) Refund.create(refundParams).charge } override fun removeSource(customerId: String, sourceId: String): Either = - either(ForbiddenError("Failed to remove source $sourceId from customer $customerId")) { + either("Failed to remove source $sourceId from customer $customerId") { Customer.retrieve(customerId).sources.retrieve(sourceId).delete().id } - private fun either(paymentError: PaymentError, action: () -> RETURN): Either { + private fun either(errorDescription: String, action: () -> RETURN): Either { return try { Either.right(action()) + } catch (e: CardException) { + // If something is decline with a card purchase, CardException will be caught + logger.warn("Payment error : $errorDescription , Stripe Error Code: ${e.getCode()}", e) + Either.left(ForbiddenError(errorDescription, e.message)) + } catch (e: RateLimitException) { + // Too many requests made to the API too quickly + logger.warn("Payment error : $errorDescription , Stripe Error Code: ${e.getCode()}", e) + Either.left(BadGatewayError(errorDescription, e.message)) + } catch (e: InvalidRequestException) { + // Invalid parameters were supplied to Stripe's API + logger.warn("Payment error : $errorDescription , Stripe Error Code: ${e.getCode()}", e) + Either.left(NotFoundError(errorDescription, e.message)) + } catch (e: AuthenticationException) { + // Authentication with Stripe's API failed + // (maybe you changed API keys recently) + logger.warn("Payment error : $errorDescription , Stripe Error Code: ${e.getCode()}", e) + Either.left(BadGatewayError(errorDescription)) + } catch (e: ApiConnectionException) { + // Network communication with Stripe failed + logger.warn("Payment error : $errorDescription , Stripe Error Code: ${e.getCode()}", e) + Either.left(BadGatewayError(errorDescription)) + } catch (e: StripeException) { + // Unknown Stripe error + logger.error("Payment error : $errorDescription , Stripe Error Code: ${e.getCode()}", e) + Either.left(BadGatewayError(errorDescription)) } catch (e: Exception) { - paymentError.externalErrorMessage = e.message - logger.warn(paymentError.description, e) - Either.left(paymentError) + // Something else happened, could be completely unrelated to Stripe + logger.error(errorDescription, e) + Either.left(BadGatewayError(errorDescription)) } } } diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/LoggerDelegate.kt b/prime-api/src/main/kotlin/org/ostelco/prime/LoggerDelegate.kt deleted file mode 100644 index 67ffc8785..000000000 --- a/prime-api/src/main/kotlin/org/ostelco/prime/LoggerDelegate.kt +++ /dev/null @@ -1,13 +0,0 @@ -package org.ostelco.prime - -import org.slf4j.Logger -import org.slf4j.LoggerFactory - -/** - * This is a function to which the member variable of type {@link org.slf4j.Logger} is delegated to be instantiated. - * The syntax to do so is `private val logger by logger()`. - * This function will then return the logger for calling class. 
- */ -fun R.logger(): Lazy = lazy { - LoggerFactory.getLogger(this.javaClass) -} diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/core/ApiError.kt b/prime-api/src/main/kotlin/org/ostelco/prime/core/ApiError.kt deleted file mode 100644 index 2cc304c1e..000000000 --- a/prime-api/src/main/kotlin/org/ostelco/prime/core/ApiError.kt +++ /dev/null @@ -1,27 +0,0 @@ -package org.ostelco.prime.core - -import javax.ws.rs.core.Response - -sealed class ApiError(val description: String) { - open var status : Int = 0 -} - -class BadGatewayError(description: String) : ApiError(description) { - override var status : Int = Response.Status.BAD_GATEWAY.getStatusCode() -} - -class BadRequestError(description: String) : ApiError(description) { - override var status : Int = Response.Status.BAD_REQUEST.getStatusCode() -} - -class ForbiddenError(description: String) : ApiError(description) { - override var status : Int = Response.Status.FORBIDDEN.getStatusCode() -} - -class InsuffientStorageError(description: String) : ApiError(description) { - override var status : Int = 507 -} - -class NotFoundError(description: String) : ApiError(description) { - override var status : Int = Response.Status.NOT_FOUND.getStatusCode() -} diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt deleted file mode 100644 index 42b2f3945..000000000 --- a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt +++ /dev/null @@ -1,20 +0,0 @@ -package org.ostelco.prime.paymentprocessor.core - -import javax.ws.rs.core.Response - -sealed class PaymentError(val description: String) { - open var status : Int = 0 - var externalErrorMessage : String? = null -} - -class ForbiddenError(description: String) : PaymentError(description) { - override var status : Int = Response.Status.FORBIDDEN.statusCode -} - -class NotFoundError(description: String) : PaymentError(description) { - override var status : Int = Response.Status.NOT_FOUND.statusCode -} - -class BadGatewayError(description: String) : PaymentError(description) { - override var status : Int = Response.Status.BAD_REQUEST.statusCode -} \ No newline at end of file diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/storage/StoreError.kt b/prime-api/src/main/kotlin/org/ostelco/prime/storage/StoreError.kt deleted file mode 100644 index c9d49c73f..000000000 --- a/prime-api/src/main/kotlin/org/ostelco/prime/storage/StoreError.kt +++ /dev/null @@ -1,45 +0,0 @@ -package org.ostelco.prime.storage - -sealed class StoreError(val type: String, val id: String) { - open var message: String = "" - protected set -} - -class NotFoundError(type: String, id: String) : StoreError(type, id) { - init { - super.message = "$type - $id not found." - } -} - -class AlreadyExistsError(type: String, id: String) : StoreError(type, id) { - init { - super.message = "$type - $id already exists." - } -} - -class NotCreatedError( - type: String, - id: String = "", - val expectedCount: Int = 1, - val actualCount:Int = 0) : StoreError(type, id) { - - init { - super.message = "Failed to create $type - $id" - } -} - -class NotUpdatedError(type: String, id: String) : StoreError(type, id) { - init { - super.message = "$type - $id not updated." - } -} - -class NotDeletedError(type: String, id: String) : StoreError(type, id) { - init { - super.message = "$type - $id not deleted." 
- } -} - -class ValidationError( - type: String, id: String, - override var message: String) : StoreError(type, id) \ No newline at end of file diff --git a/prime-client-api/build.gradle b/prime-client-api/build.gradle index d78bb4bd1..ba58c49f1 100644 --- a/prime-client-api/build.gradle +++ b/prime-client-api/build.gradle @@ -1,7 +1,7 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id 'java-library' - id 'org.hidetake.swagger.generator' version '2.12.0' + id 'org.hidetake.swagger.generator' version '2.13.0' id "idea" } @@ -42,7 +42,7 @@ dependencies { swaggerCodegen 'io.swagger:swagger-codegen-cli:2.3.1' // taken from build/swagger-code-java-client/build.gradle - implementation 'io.swagger:swagger-annotations:1.5.15' + implementation 'io.swagger:swagger-annotations:1.5.21' implementation 'com.google.code.gson:gson:2.8.5' implementation 'com.squareup.okhttp:okhttp:2.7.5' implementation 'com.squareup.okhttp:logging-interceptor:2.7.5' diff --git a/prime-api/build.gradle b/prime-modules/build.gradle similarity index 88% rename from prime-api/build.gradle rename to prime-modules/build.gradle index d678ce5f9..a302cb169 100644 --- a/prime-api/build.gradle +++ b/prime-modules/build.gradle @@ -1,5 +1,5 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "java-library" } diff --git a/prime-modules/src/main/kotlin/org/ostelco/prime/LoggerDelegate.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/LoggerDelegate.kt new file mode 100644 index 000000000..2cff693d6 --- /dev/null +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/LoggerDelegate.kt @@ -0,0 +1,13 @@ +package org.ostelco.prime + +import org.slf4j.Logger +import org.slf4j.LoggerFactory + +/** + * This is a function to which the member variable of type [org.slf4j.Logger] is delegated to be instantiated. + * The syntax to do so is `private val getLogger by getLogger()`. + * This function will then return the getLogger for calling class. + */ +fun R.getLogger(): Lazy = lazy { + LoggerFactory.getLogger(this.javaClass) +} diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsService.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsService.kt similarity index 94% rename from prime-api/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsService.kt rename to prime-modules/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsService.kt index ddfcac34a..f91b0afd6 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsService.kt +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsService.kt @@ -5,7 +5,7 @@ import org.ostelco.prime.analytics.MetricType.GAUGE import org.ostelco.prime.model.PurchaseRecord interface AnalyticsService { - fun reportTrafficInfo(msisdn: String, usedBytes: Long, bundleBytes: Long) + fun reportTrafficInfo(msisdn: String, usedBytes: Long, bundleBytes: Long, apn: String?, mccMnc: String?) 
fun reportMetric(primeMetric: PrimeMetric, value: Long) fun reportPurchaseInfo(purchaseRecord: PurchaseRecord, subscriberId: String, status: String) } diff --git a/prime-modules/src/main/kotlin/org/ostelco/prime/apierror/ApiError.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/apierror/ApiError.kt new file mode 100644 index 000000000..05b523976 --- /dev/null +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/apierror/ApiError.kt @@ -0,0 +1,48 @@ +package org.ostelco.prime.apierror + +import org.ostelco.prime.paymentprocessor.core.PaymentError +import org.ostelco.prime.storage.StoreError +import javax.ws.rs.core.Response + +sealed class ApiError(val message: String, val errorCode: ApiErrorCode, val error: InternalError?) { + open var status : Int = 0 +} + +class BadGatewayError(description: String, errorCode: ApiErrorCode, error: InternalError? = null) : ApiError(description, errorCode, error) { + override var status : Int = Response.Status.BAD_GATEWAY.statusCode +} + +class BadRequestError(description: String, errorCode: ApiErrorCode, error: InternalError? = null) : ApiError(description, errorCode, error) { + override var status : Int = Response.Status.BAD_REQUEST.statusCode +} + +class ForbiddenError(description: String, errorCode: ApiErrorCode, error: InternalError? = null) : ApiError(description, errorCode, error) { + override var status : Int = Response.Status.FORBIDDEN.statusCode +} + +class InsufficientStorageError(description: String, errorCode: ApiErrorCode, error: InternalError? = null) : ApiError(description, errorCode, error) { + override var status : Int = 507 +} + +class NotFoundError(description: String, errorCode: ApiErrorCode, error: InternalError? = null) : ApiError(description, errorCode, error) { + override var status : Int = Response.Status.NOT_FOUND.statusCode +} + +fun mapPaymentErrorToApiError(description: String, errorCode: ApiErrorCode, paymentError: PaymentError) : ApiError { + return when(paymentError) { + is org.ostelco.prime.paymentprocessor.core.ForbiddenError -> org.ostelco.prime.apierror.ForbiddenError(description, errorCode, paymentError) + is org.ostelco.prime.paymentprocessor.core.BadGatewayError -> org.ostelco.prime.apierror.BadGatewayError(description, errorCode) + is org.ostelco.prime.paymentprocessor.core.NotFoundError -> org.ostelco.prime.apierror.NotFoundError(description, errorCode, paymentError) + } +} + +fun mapStorageErrorToApiError(description: String, errorCode: ApiErrorCode, storeError: StoreError) : ApiError { + return when(storeError) { + is org.ostelco.prime.storage.NotFoundError -> org.ostelco.prime.apierror.NotFoundError(description, errorCode, storeError) + is org.ostelco.prime.storage.AlreadyExistsError -> org.ostelco.prime.apierror.ForbiddenError(description, errorCode, storeError) + is org.ostelco.prime.storage.NotCreatedError -> org.ostelco.prime.apierror.BadGatewayError(description, errorCode) + is org.ostelco.prime.storage.NotUpdatedError -> org.ostelco.prime.apierror.BadGatewayError(description, errorCode) + is org.ostelco.prime.storage.NotDeletedError -> org.ostelco.prime.apierror.BadGatewayError(description, errorCode) + is org.ostelco.prime.storage.ValidationError -> org.ostelco.prime.apierror.ForbiddenError(description, errorCode, storeError) + } +} \ No newline at end of file diff --git a/prime-modules/src/main/kotlin/org/ostelco/prime/apierror/ApiErrorCodes.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/apierror/ApiErrorCodes.kt new file mode 100644 index 000000000..4b0950a03 --- /dev/null +++ 
b/prime-modules/src/main/kotlin/org/ostelco/prime/apierror/ApiErrorCodes.kt @@ -0,0 +1,24 @@ +package org.ostelco.prime.apierror + +enum class ApiErrorCode { + FAILED_TO_CREATE_PAYMENT_PROFILE, + FAILED_TO_FETCH_PAYMENT_PROFILE, + FAILED_TO_STORE_APPLICATION_TOKEN, + @Deprecated("Will be removed") + FAILED_TO_FETCH_SUBSCRIPTION_STATUS, + FAILED_TO_FETCH_SUBSCRIPTIONS, + FAILED_TO_FETCH_BUNDLES, + FAILED_TO_FETCH_PSEUDONYM_FOR_SUBSCRIBER, + FAILED_TO_FETCH_PAYMENT_HISTORY, + FAILED_TO_FETCH_PRODUCT_LIST, + FAILED_TO_PURCHASE_PRODUCT, + FAILED_TO_FETCH_REFERRALS, + FAILED_TO_FETCH_REFERRED_BY_LIST, + FAILED_TO_FETCH_PRODUCT_INFORMATION, + FAILED_TO_STORE_PAYMENT_SOURCE, + FAILED_TO_SET_DEFAULT_PAYMENT_SOURCE, + FAILED_TO_FETCH_PAYMENT_SOURCES_LIST, + FAILED_TO_UPDATE_PROFILE, + FAILED_TO_FETCH_CONSENT, + FAILED_TO_IMPORT_OFFER, +} \ No newline at end of file diff --git a/prime-modules/src/main/kotlin/org/ostelco/prime/apierror/InternalError.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/apierror/InternalError.kt new file mode 100644 index 000000000..b95b0ba3c --- /dev/null +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/apierror/InternalError.kt @@ -0,0 +1,3 @@ +package org.ostelco.prime.apierror + +open class InternalError \ No newline at end of file diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/appnotifier/AppNotifier.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/appnotifier/AppNotifier.kt similarity index 100% rename from prime-api/src/main/kotlin/org/ostelco/prime/appnotifier/AppNotifier.kt rename to prime-modules/src/main/kotlin/org/ostelco/prime/appnotifier/AppNotifier.kt diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/appnotifier/Exceptions.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/appnotifier/Exceptions.kt similarity index 100% rename from prime-api/src/main/kotlin/org/ostelco/prime/appnotifier/Exceptions.kt rename to prime-modules/src/main/kotlin/org/ostelco/prime/appnotifier/Exceptions.kt diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/module/PrimeModule.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/module/PrimeModule.kt similarity index 67% rename from prime-api/src/main/kotlin/org/ostelco/prime/module/PrimeModule.kt rename to prime-modules/src/main/kotlin/org/ostelco/prime/module/PrimeModule.kt index 98d29a8a2..54ee236cc 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/module/PrimeModule.kt +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/module/PrimeModule.kt @@ -8,11 +8,11 @@ import io.dropwizard.setup.Environment /** * Prime is a multi-module component, wherein each module is a separate library. - * Module is such a library which needs access to the Dropwizard's {@link io.dropwizard.setup.Environment} - * for actions like registering {@link io.dropwizard.lifecycle.Managed} objects, `Resources`, - * {@link com.codahale.metrics.health.HealthCheck} etc. and/or has some configuration. + * Module is such a library which needs access to the Dropwizard's [io.dropwizard.setup.Environment] + * for actions like registering [io.dropwizard.lifecycle.Managed] objects, `Resources`, + * [com.codahale.metrics.health.HealthCheck] etc. and/or has some configuration. * Each Module has to implement this interface. - * That class will then get {@link io.dropwizard.setup.Environment} object on overriding the init method. + * That class will then get [io.dropwizard.setup.Environment] object on overriding the init method. * Same class may also accept module specific configuration. 
*/ @JsonTypeInfo(use = Id.NAME, include = As.PROPERTY, property = "type") diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/module/ResourceRegistry.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/module/ResourceRegistry.kt similarity index 64% rename from prime-api/src/main/kotlin/org/ostelco/prime/module/ResourceRegistry.kt rename to prime-modules/src/main/kotlin/org/ostelco/prime/module/ResourceRegistry.kt index 7e8ccc4ad..8c9b1996f 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/module/ResourceRegistry.kt +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/module/ResourceRegistry.kt @@ -4,7 +4,7 @@ import org.slf4j.LoggerFactory import java.util.* /** - * Use this method to get implementation objects to interfaces in `prime-api` using {@link java.util.ServiceLoader}. + * Use this method to get implementation objects to interfaces in `prime-modules` using [java.util.ServiceLoader]. * The libraries which have implementation classes should then add definition file to `META-INF/services`. * The name of the file should be name of Interface including package name. * The content of the file should be name of the implementing class including the package name. @@ -13,10 +13,12 @@ import java.util.* inline fun getResource(): T { val services = ServiceLoader.load(T::class.java) val logger = LoggerFactory.getLogger(T::class.java) - when (services.count()) { - 0 -> logger.error("No implementations found for interface ${T::class.simpleName}") - 1 -> {} - else -> logger.warn("Multiple implementations found for interface ${T::class.simpleName}") + return when (services.count()) { + 0 -> throw Exception("No implementations found for interface ${T::class.simpleName}") + 1 -> services.first() + else -> { + logger.warn("Multiple implementations found for interface ${T::class.simpleName}") + services.first() + } } - return services.first() } \ No newline at end of file diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/ocs/OcsSubscriberService.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/ocs/OcsSubscriberService.kt similarity index 71% rename from prime-api/src/main/kotlin/org/ostelco/prime/ocs/OcsSubscriberService.kt rename to prime-modules/src/main/kotlin/org/ostelco/prime/ocs/OcsSubscriberService.kt index f060d5599..492d6ff6b 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/ocs/OcsSubscriberService.kt +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/ocs/OcsSubscriberService.kt @@ -1,9 +1,10 @@ package org.ostelco.prime.ocs +import arrow.core.Either import org.ostelco.prime.model.Bundle interface OcsSubscriberService { - fun topup(subscriberId: String, sku: String) + fun topup(subscriberId: String, sku: String): Either } interface OcsAdminService { diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt similarity index 94% rename from prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt rename to prime-modules/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt index 78d27867c..098abdb17 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt @@ -1,7 +1,6 @@ package org.ostelco.prime.paymentprocessor import arrow.core.Either -import org.ostelco.prime.core.ApiError import org.ostelco.prime.paymentprocessor.core.* interface PaymentProcessor { @@ -32,6 +31,12 @@ 
interface PaymentProcessor { */ fun deletePaymentProfile(customerId: String): Either + /** + * @param userEmail: user email (Prime unique identifier for customer) + * @return Stripe customerId if exist + */ + fun getPaymentProfile(userEmail: String): Either + /** * @param productId Stripe product id * @param amount The amount to be charged in the interval specified @@ -77,7 +82,7 @@ interface PaymentProcessor { * @param customerId Stripe customer id * @return List of Stripe sourceId */ - fun getSavedSources(customerId: String): Either> + fun getSavedSources(customerId: String): Either> /** * @param customerId Stripe customer id diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/Model.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/Model.kt similarity index 60% rename from prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/Model.kt rename to prime-modules/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/Model.kt index c87fc14cf..bf608316c 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/Model.kt +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/Model.kt @@ -6,6 +6,8 @@ class ProductInfo(val id: String) class ProfileInfo(val id: String) -class SourceInfo(val id: String, val details: Map? = null) +class SourceInfo(val id: String) + +class SourceDetailsInfo(val id: String, val type: String, val details: Map) class SubscriptionInfo(val id: String) diff --git a/prime-modules/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt new file mode 100644 index 000000000..ce3c7188a --- /dev/null +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt @@ -0,0 +1,11 @@ +package org.ostelco.prime.paymentprocessor.core + +import org.ostelco.prime.apierror.InternalError + +sealed class PaymentError(val description: String, var externalErrorMessage : String? = null) : InternalError() + +class ForbiddenError(description: String, externalErrorMessage: String? = null) : PaymentError(description, externalErrorMessage) + +class NotFoundError(description: String, externalErrorMessage: String? = null) : PaymentError(description, externalErrorMessage ) + +class BadGatewayError(description: String, externalErrorMessage: String? 
= null) : PaymentError(description, externalErrorMessage) \ No newline at end of file diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/pseudonymizer/PseudonymizerService.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/pseudonymizer/PseudonymizerService.kt similarity index 100% rename from prime-api/src/main/kotlin/org/ostelco/prime/pseudonymizer/PseudonymizerService.kt rename to prime-modules/src/main/kotlin/org/ostelco/prime/pseudonymizer/PseudonymizerService.kt diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/storage/Interfaces.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/storage/Interfaces.kt similarity index 100% rename from prime-api/src/main/kotlin/org/ostelco/prime/storage/Interfaces.kt rename to prime-modules/src/main/kotlin/org/ostelco/prime/storage/Interfaces.kt diff --git a/prime-modules/src/main/kotlin/org/ostelco/prime/storage/StoreError.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/storage/StoreError.kt new file mode 100644 index 000000000..2bcd065d1 --- /dev/null +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/storage/StoreError.kt @@ -0,0 +1,23 @@ +package org.ostelco.prime.storage + +import org.ostelco.prime.apierror.InternalError + +sealed class StoreError(val type: String, val id: String, var message: String) : InternalError() + +class NotFoundError(type: String, id: String) : StoreError(type, id, message = "$type - $id not found.") + +class AlreadyExistsError(type: String, id: String) : StoreError(type, id, message = "$type - $id already exists.") + +class NotCreatedError( + type: String, + id: String = "", + val expectedCount: Int = 1, + val actualCount:Int = 0) : StoreError(type, id, message = "Failed to create $type - $id") + +class NotUpdatedError(type: String, id: String) : StoreError(type, id, message = "$type - $id not updated.") + +class NotDeletedError(type: String, id: String) : StoreError(type, id, message = "$type - $id not deleted.") + +class ValidationError( + type: String, id: String, + message: String) : StoreError(type, id, message) \ No newline at end of file diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/storage/Variants.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/storage/Variants.kt similarity index 95% rename from prime-api/src/main/kotlin/org/ostelco/prime/storage/Variants.kt rename to prime-modules/src/main/kotlin/org/ostelco/prime/storage/Variants.kt index c4b0d491d..aa157e587 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/storage/Variants.kt +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/storage/Variants.kt @@ -89,7 +89,7 @@ interface ClientGraphStore { /** * Get balance for Client */ - fun getBundles(subscriberId: String): Either?> + fun getBundles(subscriberId: String): Either> /** * Set balance after OCS Topup or Consumption @@ -148,6 +148,12 @@ interface AdminGraphStore { fun getReferredSubscriberCount(): Long fun getPaidSubscriberCount(): Long + // atomic import of Offer + Product + Segment + fun atomicImport( + offer: Offer, + segments: Collection = emptyList(), + products: Collection = emptyList()) : Either + // simple getAll // fun getOffers(): Collection // fun getSegments(): Collection diff --git a/prime-api/src/main/resources/META-INF/services/org.ostelco.prime.storage.AdminDataSource b/prime-modules/src/main/resources/META-INF/services/org.ostelco.prime.storage.AdminDataSource similarity index 100% rename from prime-api/src/main/resources/META-INF/services/org.ostelco.prime.storage.AdminDataSource rename to 
prime-modules/src/main/resources/META-INF/services/org.ostelco.prime.storage.AdminDataSource diff --git a/prime-api/src/main/resources/META-INF/services/org.ostelco.prime.storage.ClientDataSource b/prime-modules/src/main/resources/META-INF/services/org.ostelco.prime.storage.ClientDataSource similarity index 100% rename from prime-api/src/main/resources/META-INF/services/org.ostelco.prime.storage.ClientDataSource rename to prime-modules/src/main/resources/META-INF/services/org.ostelco.prime.storage.ClientDataSource diff --git a/prime/Dockerfile b/prime/Dockerfile index 20573fb4f..a7903c099 100644 --- a/prime/Dockerfile +++ b/prime/Dockerfile @@ -1,4 +1,4 @@ -FROM openjdk:8u171 +FROM azul/zulu-openjdk:8u181-8.31.0.1 MAINTAINER CSI "csi@telenordigital.com" diff --git a/prime/Dockerfile.test b/prime/Dockerfile.test index 06e6201a9..0c542a2b4 100644 --- a/prime/Dockerfile.test +++ b/prime/Dockerfile.test @@ -1,11 +1,14 @@ # This Dockerfile is used when running locally using docker-compose for Acceptance Testing. -FROM openjdk:8u171 +FROM azul/zulu-openjdk:8u181-8.31.0.1 MAINTAINER CSI "csi@telenordigital.com" RUN apt-get update \ - && apt-get install -y --no-install-recommends netcat socat=1.7.3.1-2+deb9u1 \ + && apt-get install -y --no-install-recommends \ + netcat \ + socat \ + curl \ && rm -rf /var/lib/apt/lists/* COPY script/start.sh /start.sh diff --git a/prime/build.gradle b/prime/build.gradle index d7f591f40..42fc01c4e 100644 --- a/prime/build.gradle +++ b/prime/build.gradle @@ -1,5 +1,5 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "application" id "com.github.johnrengelman.shadow" version "2.0.4" id "idea" @@ -18,7 +18,7 @@ sourceSets { } } -version = "1.14.0" +version = "1.15.0" repositories { maven { @@ -28,7 +28,7 @@ repositories { dependencies { // interface module between prime and prime-modules - implementation project(':prime-api') + implementation project(':prime-modules') // prime-modules runtimeOnly project(':ocs') @@ -48,13 +48,13 @@ dependencies { implementation 'org.dhatim:dropwizard-prometheus:2.2.0' testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" - testImplementation 'org.mockito:mockito-core:2.18.3' + testImplementation "org.mockito:mockito-core:$mockitoVersion" testImplementation 'com.lmax:disruptor:3.4.2' testImplementation 'com.palantir.docker.compose:docker-compose-rule-junit4:0.34.0' testImplementation 'org.dhatim:dropwizard-prometheus:2.2.0' integrationImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" - integrationImplementation 'org.mockito:mockito-core:2.18.3' + integrationImplementation "org.mockito:mockito-core:$mockitoVersion" integrationImplementation 'com.lmax:disruptor:3.4.2' integrationImplementation 'com.palantir.docker.compose:docker-compose-rule-junit4:0.34.0' } @@ -72,9 +72,7 @@ task integration(type: Test, description: 'Runs the integration tests.', group: classpath = sourceSets.integration.runtimeClasspath } -if (System.getenv("BUILD_ENV") != "CI_CD") { - build.dependsOn integration -} +build.dependsOn integration shadowJar { mainClassName = 'org.ostelco.prime.PrimeApplicationKt' diff --git a/prime/cloudbuild.dev.yaml b/prime/cloudbuild.dev.yaml index f3939d148..88c3dceca 100644 --- a/prime/cloudbuild.dev.yaml +++ b/prime/cloudbuild.dev.yaml @@ -50,7 +50,7 @@ steps: path: /root/out_zip # Build docker images - name: gcr.io/cloud-builders/docker - args: ['build', '--tag=gcr.io/$PROJECT_ID/prime:$SHORT_SHA', '--cache-from', 'openjdk:8u171', 
'prime'] + args: ['build', '--tag=eu.gcr.io/$PROJECT_ID/prime:$SHORT_SHA', '--cache-from', 'azul/zulu-openjdk:8u181-8.31.0.1', 'prime'] timeout: 120s # Deploy new docker image to Google Kubernetes Engine (GKE) - name: ubuntu @@ -63,4 +63,4 @@ steps: timeout: 300s # Upload docker image into Google Container Registry (GCR) -images: ['gcr.io/$PROJECT_ID/prime:$SHORT_SHA'] \ No newline at end of file +images: ['eu.gcr.io/$PROJECT_ID/prime:$SHORT_SHA'] \ No newline at end of file diff --git a/prime/cloudbuild.yaml b/prime/cloudbuild.yaml index 1df09ecc2..d4cba67b0 100644 --- a/prime/cloudbuild.yaml +++ b/prime/cloudbuild.yaml @@ -50,7 +50,7 @@ steps: path: /root/out_zip # Build docker images - name: gcr.io/cloud-builders/docker - args: ['build', '--tag=gcr.io/$PROJECT_ID/prime:$TAG_NAME', '--cache-from', 'openjdk:8u171', 'prime'] + args: ['build', '--tag=eu.gcr.io/$PROJECT_ID/prime:$TAG_NAME', '--cache-from', 'azul/zulu-openjdk:8u181-8.31.0.1', 'prime'] timeout: 120s # Deploy new docker image to Google Kubernetes Engine (GKE) - name: ubuntu @@ -63,4 +63,4 @@ steps: timeout: 300s # Upload docker image into Google Container Registry (GCR) -images: ['gcr.io/$PROJECT_ID/prime:$TAG_NAME'] \ No newline at end of file +images: ['eu.gcr.io/$PROJECT_ID/prime:$TAG_NAME'] \ No newline at end of file diff --git a/prime/config/config.yaml b/prime/config/config.yaml index e12a6f1af..008759df7 100644 --- a/prime/config/config.yaml +++ b/prime/config/config.yaml @@ -2,21 +2,22 @@ modules: - type: firebase config: configFile: /secret/pantel-prod.json - databaseName: pantel-2decb rootPath: ${FIREBASE_ROOT_PATH} - type: neo4j config: - host: neo4j + host: ${NEO4J_HOST} protocol: bolt+routing - type: analytics config: projectId: pantel-2decb - dataTrafficTopicId: data-traffic - purchaseInfoTopicId: purchase-info + dataTrafficTopicId: ${DATA_TRAFFIC_TOPIC} + purchaseInfoTopicId: ${PURCHASE_INFO_TOPIC} - type: ocs config: lowBalanceThreshold: 100000000 - type: pseudonymizer + config: + namespace: ${DATASTORE_NAMESPACE:-""} - type: api config: authenticationCachePolicy: maximumSize=10000, expireAfterAccess=10m @@ -26,7 +27,6 @@ modules: - type: firebase-app-notifier config: configFile: /secret/pantel-prod.json - databaseName: pantel-2decb - type: admin server: diff --git a/prime/config/test.yaml b/prime/config/test.yaml index b67aa3372..a178eb66f 100644 --- a/prime/config/test.yaml +++ b/prime/config/test.yaml @@ -4,7 +4,6 @@ modules: - type: firebase config: configFile: /secret/pantel-prod.json - databaseName: pantel-2decb rootPath: test - type: neo4j config: @@ -31,7 +30,6 @@ modules: - type: firebase-app-notifier config: configFile: /secret/pantel-prod.json - databaseName: pantel-2decb - type: admin server: diff --git a/prime/infra/MONITORING.md b/prime/infra/MONITORING.md index 97e54c81b..64f39ed63 100644 --- a/prime/infra/MONITORING.md +++ b/prime/infra/MONITORING.md @@ -64,7 +64,9 @@ __`TODO: Figure out a better way / automatic way to backup dashboards and automa Contains a Deployment and Service configuration and a ConfigMap with predefined dashboards. -Grafana is exposed using a LoadBalancer. +Grafana is exposed using a LoadBalancer. + +The current dashboard is saved in `grafana-dashboard.json` in this folder ### [](#prometheus)[Prometheus](https://prometheus.io/) diff --git a/prime/infra/README.md b/prime/infra/README.md index 6c8208b21..9162e4760 100644 --- a/prime/infra/README.md +++ b/prime/infra/README.md @@ -114,13 +114,13 @@ Increment the docker image tag (version) for next two steps. 
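Both of the following steps assume `PROJECT_ID` and `PRIME_VERSION` are exported in the current shell; a minimal sketch of doing that, reusing the `gcloud config` lookup shown further down in this document (the tag value here is only an example):

```bash
# Use the currently configured GCP project and pick the new (incremented) image tag.
export PROJECT_ID="$(gcloud config get-value project -q)"
export PRIME_VERSION="1.15.0"   # example tag; substitute the version you incremented above
```
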
Build the Docker image (In the folder with Dockerfile) ```bash -docker build -t gcr.io/${PROJECT_ID}/prime:${PRIME_VERSION} prime +docker build -t eu.gcr.io/${PROJECT_ID}/prime:${PRIME_VERSION} prime ``` Push to the registry ```bash -gcloud docker -- push gcr.io/${PROJECT_ID}/prime:${PRIME_VERSION} +docker push eu.gcr.io/${PROJECT_ID}/prime:${PRIME_VERSION} ``` Update the tag (version) of prime's docker image in `infra/prod/prime.yaml`. @@ -324,8 +324,8 @@ export SHORT_SHA="$(git log -1 --pretty=format:%h)" echo PROJECT_ID=${PROJECT_ID} echo SHORT_SHA=${SHORT_SHA} -docker build -t gcr.io/${PROJECT_ID}/prime:${SHORT_SHA} . -gcloud docker -- push gcr.io/${PROJECT_ID}/prime:${SHORT_SHA} +docker build -t eu.gcr.io/${PROJECT_ID}/prime:${SHORT_SHA} . +docker push eu.gcr.io/${PROJECT_ID}/prime:${SHORT_SHA} sed -e s/PRIME_VERSION/${SHORT_SHA}/g prime/infra/dev/prime.yaml | kubectl apply -f - ``` diff --git a/prime/infra/dev/prime-admin-api.yaml b/prime/infra/dev/prime-admin-api.yaml new file mode 100644 index 000000000..236f711aa --- /dev/null +++ b/prime/infra/dev/prime-admin-api.yaml @@ -0,0 +1,22 @@ +swagger: "2.0" + # THis is where this is going + # https://github.com/GoogleCloudPlatform/golang-samples/blob/master/endpoints/getting-started/openapi.yaml +info: + title: "Offer definition input" + description: "Input definitions of offers, products and segments for the consumption engine" + version: "1.0.0" +# This field will be replaced by the deploy_api.sh script. +# host: "YOUR-PROJECT-ID.appspot.com" +host: "import.endpoints.pantel-2decb.cloud.goog" +schemes: + - "https" +paths: + "/import/status": + get: + description: "If the status service is available, then return 200." + operationId: "getStatus" + responses: + 200: + description: "Success." + 400: + description: "Import service not available" diff --git a/prime/infra/dev/prime-client-api.yaml b/prime/infra/dev/prime-client-api.yaml index eea0a83a7..4f258dfa6 100644 --- a/prime/infra/dev/prime-client-api.yaml +++ b/prime/infra/dev/prime-client-api.yaml @@ -238,19 +238,34 @@ paths: - auth0_jwt: [] "/subscriptions": get: - description: "Get subscription (msisdn, balance) for the user (identified by bearer token)." + description: "Get subscription (msisdn) for the user (identified by bearer token)." produces: - application/json operationId: "getSubscriptions" responses: 200: - description: "Get the subscription for this user." + description: "Get subscriptions for this user." schema: $ref: '#/definitions/SubscriptionList' 404: description: "No subscription found for this user." security: - auth0_jwt: [] + "/bundles": + get: + description: "Get bundles (balance) for the user (identified by bearer token)." + produces: + - application/json + operationId: "getBundles" + responses: + 200: + description: "Get bundles for this user." + schema: + $ref: '#/definitions/BundleList' + 404: + description: "No bundle found for this user." + security: + - auth0_jwt: [] "/subscription/status": get: description: "Get subscription status for the user (identified by bearer token)." @@ -377,8 +392,22 @@ definitions: type: object properties: msisdn: - description: "The MSISDN for which the purchase was made." 
+ description: "Mobile number for this subscription" type: string + BundleList: + type: array + items: + $ref: '#/definitions/Bundle' + Bundle: + type: object + properties: + id: + description: "Bundle ID" + type: string + balance: + description: "Balance units in this bundle" + type: integer + format: int64 SubscriptionStatus: type: object properties: @@ -442,41 +471,16 @@ definitions: id: description: "The identifier for the source" type: string + type: + description: "The type of source" + type: string details: description: "All information stored with the source" type: object - properties: - id: - type: string - accountType: - type: string - addressLine1: - type: string - addressLine2: - type: string - zip: - type: string - city: - type: string - state: - type: string - country: - type: string - currency: - type: string - brand: - type: string - last4: - type: string - expireMonth: - type: integer - expireYear: - type: integer - funding: - type: string - required: - - id - - accountType + additionalProperties: true + required: + - id + - type ConsentList: type: array items: diff --git a/prime/infra/dev/prime.yaml b/prime/infra/dev/prime.yaml index f2ad741c5..ec00224e2 100644 --- a/prime/infra/dev/prime.yaml +++ b/prime/infra/dev/prime.yaml @@ -54,6 +54,22 @@ spec: app: prime tier: backend --- +apiVersion: v1 +kind: Service +metadata: + name: pseudonym-server-service + labels: + app: prime + tier: backend +spec: + ports: + - protocol: TCP + port: 80 + targetPort: 8080 + selector: + app: prime + tier: backend +--- apiVersion: extensions/v1beta1 kind: Deployment metadata: @@ -126,11 +142,19 @@ spec: name: metrics-ostelco-ssl readOnly: true - name: prime - image: gcr.io/pantel-2decb/prime:PRIME_VERSION + image: eu.gcr.io/pantel-2decb/prime:PRIME_VERSION imagePullPolicy: Always env: + - name: NEO4J_HOST + value: neo4j + - name: DATASTORE_NAMESPACE + value: dev - name: FIREBASE_ROOT_PATH value: dev + - name: DATA_TRAFFIC_TOPIC + value: data-traffic-dev + - name: PURCHASE_INFO_TOPIC + value: purchase-info-dev - name: STRIPE_API_KEY valueFrom: secretKeyRef: @@ -157,4 +181,4 @@ spec: secretName: ocs-ostelco-ssl - name: metrics-ostelco-ssl secret: - secretName: metrics-ostelco-ssl \ No newline at end of file + secretName: metrics-ostelco-ssl diff --git a/prime/infra/grafana-dashboard.json b/prime/infra/grafana-dashboard.json new file mode 100644 index 000000000..542c40883 --- /dev/null +++ b/prime/infra/grafana-dashboard.json @@ -0,0 +1,1297 @@ +{ + "annotations": { + "list": [] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "hideControls": false, + "id": 3, + "links": [], + "rows": [ + { + "collapse": false, + "height": "250px", + "panels": [ + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": "prometheus", + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "id": 1, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "span": 3, + "sparkline": { + "fillColor": "rgba(31, 
118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [ + { + "expr": "active_sessions", + "intervalFactor": 2, + "metric": "active_sessions", + "refId": "A", + "step": 600 + } + ], + "thresholds": "", + "title": "Active Sessions", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": "prometheus", + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "id": 2, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "span": 3, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [ + { + "expr": "scalar(total_users)", + "intervalFactor": 2, + "legendFormat": "", + "metric": "total_users", + "refId": "A", + "step": 600 + } + ], + "thresholds": "", + "title": "Total Users Provisioned", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": "prometheus", + "description": "", + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "id": 3, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "span": 3, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [ + { + "expr": "scalar(users_acquired_through_referrals)", + "intervalFactor": 2, + "legendFormat": "", + "metric": "users_acquired_through_referrals", + "refId": "A", + "step": 600 + } + ], + "thresholds": "", + "title": "Users acquired through referrals", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": "prometheus", + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "id": 4, + "interval": null, + "links": [], 
+ "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "span": 3, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [ + { + "expr": "scalar(users_paid_at_least_once)", + "intervalFactor": 2, + "legendFormat": "", + "metric": "users_paid_at_least_once", + "refId": "A", + "step": 600 + } + ], + "thresholds": "", + "title": "Users paid at least once", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + } + ], + "repeat": null, + "repeatIteration": null, + "repeatRowId": null, + "showTitle": false, + "title": "Dashboard Row", + "titleSize": "h6" + }, + { + "collapse": false, + "height": 279, + "panels": [ + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(208, 134, 42, 0.94)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": null, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "id": 8, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [ + { + "expr": "sims_who_have_used_data_today", + "hide": false, + "intervalFactor": 2, + "metric": "sims_who_have_used_data_today", + "refId": "A", + "step": 600 + } + ], + "thresholds": "", + "timeFrom": null, + "timeShift": null, + "title": "Simcards using data today", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": null, + "decimals": null, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "id": 10, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": " MB", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [ + { + "expr": "total_data_used_today / 1000000", + 
"intervalFactor": 2, + "legendFormat": "", + "metric": "total_data_used_today", + "refId": "A", + "step": 600 + } + ], + "thresholds": "", + "title": "Total data usage today", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": null, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "id": 12, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "NOK ", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [ + { + "expr": "revenue_today / 100", + "intervalFactor": 2, + "legendFormat": "", + "metric": "revenue_today", + "refId": "A", + "step": 600 + } + ], + "thresholds": "", + "title": "Revenue today", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": null, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "id": 14, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "NOK ", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [ + { + "expr": "((revenue_today / 100) / scalar(total_users))", + "hide": false, + "intervalFactor": 2, + "legendFormat": "", + "metric": "total_users", + "refId": "A", + "step": 600 + } + ], + "thresholds": "", + "title": "ARPU today", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": null, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "id": 16, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + 
"nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [ + { + "expr": "total_paid_users_today", + "intervalFactor": 2, + "metric": "total_paid_users_today", + "refId": "A", + "step": 600 + } + ], + "thresholds": "", + "title": "Paid users today", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "aliasColors": {}, + "bars": false, + "datasource": null, + "fill": 1, + "id": 18, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": false, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 6, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "total_data_used_today", + "intervalFactor": 2, + "legendFormat": "", + "metric": "total_data_used_today", + "refId": "A", + "step": 60 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Data Usage today", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "decbytes", + "label": "Bytes", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": "", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + } + ], + "repeat": null, + "repeatIteration": null, + "repeatRowId": null, + "showTitle": true, + "title": "So far Today", + "titleSize": "h6" + }, + { + "collapse": false, + "height": 286, + "panels": [ + { + "cacheTimeout": null, + "colorBackground": true, + "colorValue": false, + "colors": [ + "rgba(255, 0, 0, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": null, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "id": 9, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [ + { + "expr": "sims_who_have_used_data_yesterday", + "hide": false, + "intervalFactor": 2, + "metric": "sims_who_have_used_data_yesterday", + "refId": "A", + "step": 600 + } + ], + "thresholds": "", + "title": "Sims used data yesterday", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": true, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + 
"rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": null, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "id": 11, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": " MB", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [ + { + "expr": "total_data_used_yesterday / 1000000", + "intervalFactor": 2, + "legendFormat": "", + "metric": "total_data_used_yesterday", + "refId": "A", + "step": 600 + } + ], + "thresholds": "", + "title": "Total data usage yesterday", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": true, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": null, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "id": 13, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "NOK ", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [ + { + "expr": "revenue_yesterday / 100", + "intervalFactor": 2, + "legendFormat": "", + "metric": "revenue_yesterday", + "refId": "A", + "step": 600 + } + ], + "thresholds": "", + "title": "Revenue yesterday", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": true, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": null, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "id": 15, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "NOK ", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [ + { + "expr": "((revenue_yesterday / 100) / 
scalar(total_users))", + "intervalFactor": 2, + "legendFormat": "", + "metric": "revenue_yesterday", + "refId": "A", + "step": 600 + } + ], + "thresholds": "", + "title": "ARPU yesterday", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": true, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": null, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "id": 17, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [ + { + "expr": "total_paid_users_yesterday", + "intervalFactor": 2, + "metric": "total_paid_users_yesterday", + "refId": "A", + "step": 600 + } + ], + "thresholds": "", + "title": "Paid users yesterday", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "aliasColors": {}, + "bars": false, + "datasource": null, + "fill": 1, + "id": 19, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": false, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 6, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "total_data_used_yesterday", + "intervalFactor": 2, + "legendFormat": "", + "metric": "total_data_used_yesterday", + "refId": "A", + "step": 60 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Data Used yesterday", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "decbytes", + "label": "Bytes", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + } + ], + "repeat": null, + "repeatIteration": null, + "repeatRowId": null, + "showTitle": true, + "title": "Yesterday", + "titleSize": "h6" + } + ], + "schemaVersion": 14, + "style": "dark", + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "browser", + "title": "Dashboard", + "version": 22 +} \ No newline at end of file diff --git a/prime/infra/dev/monitoring-pushgateway.yaml b/prime/infra/monitoring-pushgateway.yaml 
similarity index 100% rename from prime/infra/dev/monitoring-pushgateway.yaml rename to prime/infra/monitoring-pushgateway.yaml diff --git a/prime/infra/dev/monitoring.yaml b/prime/infra/monitoring.yaml similarity index 94% rename from prime/infra/dev/monitoring.yaml rename to prime/infra/monitoring.yaml index 12376307d..cb2fd1afd 100644 --- a/prime/infra/dev/monitoring.yaml +++ b/prime/infra/monitoring.yaml @@ -2012,125 +2012,108 @@ spec: --- apiVersion: v1 data: - prometheus.yaml: | + prometheus.yaml: |- global: - scrape_interval: 10s - scrape_timeout: 10s - evaluation_interval: 10s - rule_files: - - "/etc/prometheus-rules/*.rules" + scrape_interval: 60s + evaluation_interval: 60s scrape_configs: - - # https://github.com/prometheus/prometheus/blob/master/documentation/examples/prometheus-kubernetes.yml#L37 - - job_name: 'kubernetes-nodes' + - job_name: 'kubernetes-apiservers' + kubernetes_sd_configs: + - role: endpoints + scheme: https tls_config: ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token - kubernetes_sd_configs: - - role: node relabel_configs: - - source_labels: [__address__] - regex: '(.*):10250' - replacement: '${1}:10255' - target_label: __address__ - - # https://github.com/prometheus/prometheus/blob/master/documentation/examples/prometheus-kubernetes.yml#L79 - - job_name: 'kubernetes-endpoints' + - source_labels: [__meta_kubernetes_namespace, __meta_kubernetes_service_name, __meta_kubernetes_endpoint_port_name] + action: keep + regex: default;kubernetes;https + - job_name: 'kubernetes-nodes' + scheme: https + tls_config: + ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt + bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token kubernetes_sd_configs: - - role: endpoints + - role: node relabel_configs: - - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_scrape] - action: keep - regex: true - - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_scheme] - action: replace - target_label: __scheme__ - regex: (https?) 
- - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_path] - action: replace - target_label: __metrics_path__ - regex: (.+) - - source_labels: [__address__, __meta_kubernetes_service_annotation_prometheus_io_port] - action: replace - target_label: __address__ - regex: (.+)(?::\d+);(\d+) - replacement: $1:$2 - - action: labelmap - regex: __meta_kubernetes_service_label_(.+) - - source_labels: [__meta_kubernetes_namespace] - action: replace - target_label: kubernetes_namespace - - source_labels: [__meta_kubernetes_service_name] - action: replace - target_label: kubernetes_name + - action: labelmap + regex: __meta_kubernetes_node_label_(.+) + - target_label: __address__ + replacement: kubernetes.default.svc:443 + - source_labels: [__meta_kubernetes_node_name] + regex: (.+) + target_label: __metrics_path__ + replacement: /api/v1/nodes/${1}/proxy/metrics - # https://github.com/prometheus/prometheus/blob/master/documentation/examples/prometheus-kubernetes.yml#L119 - - job_name: 'kubernetes-services' - metrics_path: /probe - params: - module: [http_2xx] + - job_name: 'kubernetes-pods' kubernetes_sd_configs: - - role: service + - role: pod relabel_configs: - - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_probe] - action: keep - regex: true - - source_labels: [__address__] - target_label: __param_target - - target_label: __address__ - replacement: blackbox - - source_labels: [__param_target] - target_label: instance - - action: labelmap - regex: __meta_kubernetes_service_label_(.+) - - source_labels: [__meta_kubernetes_namespace] - target_label: kubernetes_namespace - - source_labels: [__meta_kubernetes_service_name] - target_label: kubernetes_name - + - source_labels: [__meta_kubernetes_pod_annotation_prometheus_io_scrape] + action: keep + regex: true + - source_labels: [__meta_kubernetes_pod_annotation_prometheus_io_path] + action: replace + target_label: __metrics_path__ + regex: (.+) + - source_labels: [__address__, __meta_kubernetes_pod_annotation_prometheus_io_port] + action: replace + regex: ([^:]+)(?::\d+)?;(\d+) + replacement: $1:$2 + target_label: __address__ + - action: labelmap + regex: __meta_kubernetes_pod_label_(.+) + - source_labels: [__meta_kubernetes_namespace] + action: replace + target_label: kubernetes_namespace + - source_labels: [__meta_kubernetes_pod_name] + action: replace + target_label: kubernetes_pod_name - job_name: 'kubernetes-cadvisor' - metrics_path: /metrics/cadvisor - #metrics_path: /cadvisor + scheme: https tls_config: ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token kubernetes_sd_configs: - - role: node + - role: node relabel_configs: - - source_labels: [__address__] - regex: '(.*):10250' - replacement: '${1}:10255' - #replacement: /api/v1/nodes/${1}/proxy/metrics/cadvisor - target_label: __address__ + - action: labelmap + regex: __meta_kubernetes_node_label_(.+) + - target_label: __address__ + replacement: kubernetes.default.svc:443 + - source_labels: [__meta_kubernetes_node_name] + regex: (.+) + target_label: __metrics_path__ + replacement: /api/v1/nodes/${1}/proxy/metrics/cadvisor - # https://github.com/prometheus/prometheus/blob/master/documentation/examples/prometheus-kubernetes.yml#L156 - - job_name: 'kubernetes-pods' + - job_name: 'kubernetes-service-endpoints' kubernetes_sd_configs: - - role: pod + - role: endpoints relabel_configs: - - source_labels: [__meta_kubernetes_pod_annotation_prometheus_io_scrape] - action: keep - regex: 
true - - source_labels: [__meta_kubernetes_pod_annotation_prometheus_io_path] - action: replace - target_label: __metrics_path__ - regex: (.+) - - source_labels: [__address__, __meta_kubernetes_pod_annotation_prometheus_io_port] - action: replace - regex: (.+):(?:\d+);(\d+) - replacement: ${1}:${2} - target_label: __address__ - - action: labelmap - regex: __meta_kubernetes_pod_label_(.+) - - source_labels: [__meta_kubernetes_namespace] - action: replace - target_label: kubernetes_namespace - - source_labels: [__meta_kubernetes_pod_name] - action: replace - target_label: kubernetes_pod_name - - source_labels: [__meta_kubernetes_pod_container_port_number] - action: keep - regex: 9\d{3} + - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_scrape] + action: keep + regex: true + - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_scheme] + action: replace + target_label: __scheme__ + regex: (https?) + - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_path] + action: replace + target_label: __metrics_path__ + regex: (.+) + - source_labels: [__address__, __meta_kubernetes_service_annotation_prometheus_io_port] + action: replace + target_label: __address__ + regex: ([^:]+)(?::\d+)?;(\d+) + replacement: $1:$2 + - action: labelmap + regex: __meta_kubernetes_service_label_(.+) + - source_labels: [__meta_kubernetes_namespace] + action: replace + target_label: kubernetes_namespace + - source_labels: [__meta_kubernetes_service_name] + action: replace + target_label: kubernetes_name kind: ConfigMap metadata: creationTimestamp: null @@ -2160,6 +2143,14 @@ spec: component: core spec: serviceAccountName: prometheus-k8s + initContainers: + - name: "init-chown-data" + image: "busybox" + # 65534 is the nobody user that prometheus uses. 
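+        # Running chown here is likely needed because the prometheus-data PVC below
+        # is initially mounted root-owned, and Prometheus must be able to write its
+        # TSDB under /prometheus/ as user 65534.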
+ command: ["chown", "-R", "65534:65534", /prometheus/] + volumeMounts: + - name: data-volume + mountPath: /prometheus/ containers: - name: watch image: weaveworks/watch:master-5b2a6e5 @@ -2169,26 +2160,23 @@ spec: - name: config-volume mountPath: /etc/prometheus-rules - name: prometheus - image: prom/prometheus:v1.7.0 + image: prom/prometheus:v2.4.2 args: - - '-storage.local.retention=12h' - - '-storage.local.memory-chunks=500000' - - '-config.file=/etc/prometheus/prometheus.yaml' + - '--storage.tsdb.path=/prometheus/' + - '--config.file=/etc/prometheus/prometheus.yaml' ports: - name: webui containerPort: 9090 resources: - requests: - cpu: 500m - memory: 500M limits: - cpu: 500m - memory: 500M + memory: "2Gi" volumeMounts: - name: config-volume mountPath: /etc/prometheus - name: rules-volume mountPath: /etc/prometheus-rules + - name: data-volume + mountPath: /prometheus/ volumes: - name: config-volume configMap: @@ -2196,6 +2184,21 @@ spec: - name: rules-volume configMap: name: prometheus-rules + - name: data-volume + persistentVolumeClaim: + claimName: prometheus-data +--- +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: prometheus-data + namespace: monitoring +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 30Gi --- apiVersion: extensions/v1beta1 kind: Deployment diff --git a/prime/infra/new-dev/metrics-api.yaml b/prime/infra/new-dev/metrics-api.yaml new file mode 100644 index 000000000..109346180 --- /dev/null +++ b/prime/infra/new-dev/metrics-api.yaml @@ -0,0 +1,30 @@ +type: google.api.Service + +config_version: 3 + +name: metrics.new.dev.ostelco.org + +title: Prime Metrics Reporter Service gRPC API + +apis: + - name: org.ostelco.prime.metrics.api.OcsgwAnalyticsService + +usage: + rules: + # All methods can be called without an API Key. + - selector: "*" + allow_unregistered_calls: true + +authentication: + providers: + - id: google_service_account + issuer: esp-credentials@pi-ostelco-dev.iam.gserviceaccount.com + jwks_uri: https://www.googleapis.com/robot/v1/metadata/x509/esp-credentials@pi-ostelco-dev.iam.gserviceaccount.com + audiences: > + https://metrics.new.dev.ostelco.org/org.ostelco.prime.metrics.api.OcsgwAnalyticsService, + metrics.new.dev.ostelco.org/org.ostelco.prime.metrics.api.OcsgwAnalyticsService, + metrics.new.dev.ostelco.org + rules: + - selector: "*" + requirements: + - provider_id: google_service_account \ No newline at end of file diff --git a/prime/infra/new-dev/ocs-api.yaml b/prime/infra/new-dev/ocs-api.yaml new file mode 100644 index 000000000..a8a9b8865 --- /dev/null +++ b/prime/infra/new-dev/ocs-api.yaml @@ -0,0 +1,30 @@ +type: google.api.Service + +config_version: 3 + +name: ocs.new.dev.ostelco.org + +title: OCS Service gRPC API + +apis: + - name: org.ostelco.ocs.api.OcsService + +usage: + rules: + # All methods can be called without an API Key. 
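+    # Requests are still authenticated via the google_service_account JWT provider defined below.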
+ - selector: "*" + allow_unregistered_calls: true + +authentication: + providers: + - id: google_service_account + issuer: esp-credentials@pi-ostelco-dev.iam.gserviceaccount.com + jwks_uri: https://www.googleapis.com/robot/v1/metadata/x509/esp-credentials@pi-ostelco-dev.iam.gserviceaccount.com + audiences: > + https://ocs.new.dev.ostelco.org/org.ostelco.ocs.api.OcsService, + ocs.new.dev.ostelco.org/org.ostelco.ocs.api.OcsService, + ocs.new.dev.ostelco.org + rules: + - selector: "*" + requirements: + - provider_id: google_service_account \ No newline at end of file diff --git a/prime/infra/new-dev/prime-client-api.yaml b/prime/infra/new-dev/prime-client-api.yaml new file mode 100644 index 000000000..d192203a3 --- /dev/null +++ b/prime/infra/new-dev/prime-client-api.yaml @@ -0,0 +1,576 @@ +swagger: "2.0" +info: + title: "Ostelco API" + description: "The client API for Panacea." + version: "1.0.0" +host: "api.new.dev.ostelco.org" +x-google-endpoints: + - name: "api.new.dev.ostelco.org" + allowCors: true +schemes: + - "https" +paths: + "/profile": + get: + description: "Get profile for the user (email-id present in the bearer token)." + produces: + - application/json + operationId: "getProfile" + responses: + 200: + description: "Get the profile for this user." + schema: + $ref: '#/definitions/Profile' + 404: + description: "Profile not found." + security: + - auth0_jwt: [] + post: + description: "Create a new profile." + consumes: + - application/json + produces: + - application/json + operationId: "createProfile" + parameters: + - name: profile + in: body + description: The profile to create. + schema: + $ref: '#/definitions/Profile' + - name: referred_by + in: query + description: "Referral ID of user who has invited this user" + type: string + responses: + 201: + description: "Successfully created the profile." + schema: + $ref: '#/definitions/Profile' + security: + - auth0_jwt: [] + put: + description: "Update an existing profile." + consumes: + - application/json + produces: + - application/json + operationId: "updateProfile" + parameters: + - in: body + name: profile + description: The updated profile. + schema: + $ref: '#/definitions/Profile' + responses: + 200: + description: "Successfully updated the profile." + schema: + $ref: '#/definitions/Profile' + 404: + description: "Profile not found." + security: + - auth0_jwt: [] + "/applicationtoken": + post: + description: "Store application token" + consumes: + - application/json + produces: + - application/json + operationId: "storeApplicationToken" + parameters: + - name: applicationToken + in: body + description: application token + schema: + $ref: '#/definitions/ApplicationToken' + responses: + 201: + description: "Successfully stored token." + schema: + $ref: '#/definitions/ApplicationToken' + 404: + description: "User not found." + 507: + description: "Not able to store token." + security: + - auth0_jwt: [] + "/paymentSources": + get: + description: "Get all payment sources for the user." + produces: + - application/json + operationId: "listSources" + responses: + 200: + description: "List of payment sources." + schema: + $ref: '#/definitions/PaymentSourceList' + 404: + description: "No user found." 
+ security: + - auth0_jwt: [] + post: + description: "Add a new payment source for user" + produces: + - application/json + operationId: "createSource" + parameters: + - name: sourceId + in: query + description: "The stripe-id of the source to be added to user" + required: true + type: string + responses: + 201: + description: "Successfully added source to user" + schema: + $ref: '#/definitions/PaymentSource' + 404: + description: "User not found." + security: + - auth0_jwt: [] + put: + description: "Set the source as default for user" + produces: + - application/json + operationId: "setDefaultSource" + parameters: + - name: sourceId + in: query + description: "The stripe-id of the default source" + required: true + type: string + responses: + 200: + description: "Successfully set as default source to user" + schema: + $ref: '#/definitions/PaymentSource' + 404: + description: "User not found." + security: + - auth0_jwt: [] + "/products": + get: + description: "Get all products for the user." + produces: + - application/json + operationId: "getAllProducts" + responses: + 200: + description: "List of products." + schema: + $ref: '#/definitions/ProductList' + 404: + description: "No products found for the user." + security: + - auth0_jwt: [] + "/products/{sku}": + post: + description: "Buy the product specified in sku parameter." + produces: + - application/json + - text/plain + operationId: "buyProductDeprecated" + responses: + 201: + description: "Successfully purchased the product." + schema: + $ref: '#/definitions/Product' + 404: + description: "Product not found." + security: + - auth0_jwt: [] + parameters: + - name: sku + in: path + description: SKU to be purchased + required: true + type: string + "/products/{sku}/purchase": + post: + description: "Buy the product specified in sku parameter." + produces: + - application/json + - text/plain + operationId: "purchaseProduct" + parameters: + - name: sku + in: path + description: "SKU to be purchased" + required: true + type: string + - name: sourceId + in: query + description: "The stripe-id of the source to be used for this purchase (if empty, use default source)" + required: false + type: string + - name: saveCard + in: query + description: "Whether to save this card as a source for this user (default = false)" + required: false + type: boolean + responses: + 201: + description: "Successfully purchased the product." + schema: + $ref: '#/definitions/Product' + 404: + description: "Product not found." + security: + - auth0_jwt: [] + "/purchases": + get: + description: "Get list of all purchases." + produces: + - application/json + - text/plain + operationId: "getPurchaseHistory" + responses: + 200: + description: "List of Purchase Records." + schema: + $ref: '#/definitions/PurchaseRecordList' + 404: + description: "No Purchase Records found for the user." + security: + - auth0_jwt: [] + "/subscriptions": + get: + description: "Get subscription (msisdn) for the user (identified by bearer token)." + produces: + - application/json + operationId: "getSubscriptions" + responses: + 200: + description: "Get subscriptions for this user." + schema: + $ref: '#/definitions/SubscriptionList' + 404: + description: "No subscription found for this user." + security: + - auth0_jwt: [] + "/bundles": + get: + description: "Get bundles (balance) for the user (identified by bearer token)." + produces: + - application/json + operationId: "getBundles" + responses: + 200: + description: "Get bundles for this user." 
+ schema: + $ref: '#/definitions/BundleList' + 404: + description: "No bundle found for this user." + security: + - auth0_jwt: [] + "/subscription/status": + get: + description: "Get subscription status for the user (identified by bearer token)." + produces: + - application/json + operationId: "getSubscriptionStatus" + responses: + 200: + description: "Get the subscription status for this user." + schema: + $ref: '#/definitions/SubscriptionStatus' + 404: + description: "No subscription status found for this user." + security: + - auth0_jwt: [] + "/subscription/activePseudonyms": + get: + description: "Get currently active pseudonyms for the user's msisdn (identified by bearer token)." + produces: + - application/json + operationId: "getActivePseudonyms" + responses: + 200: + description: "Get active pseudonyms for the user's msisdn." + schema: + $ref: '#/definitions/ActivePseudonyms' + 404: + description: "No subscription found for this user." + security: + - auth0_jwt: [] + "/referred": + get: + description: "Get list of people whom the user has referred to." + produces: + - application/json + operationId: "getReferred" + responses: + 200: + description: "List of people whom this person has referred to." + schema: + $ref: '#/definitions/PersonList' + 404: + description: "No referrals found for this user." + security: + - auth0_jwt: [] + "/referred/by": + get: + description: "Get the people who had referred this user." + produces: + - application/json + operationId: "getReferredBy" + responses: + 200: + description: "List of people whom this person has referred to." + schema: + $ref: '#/definitions/Person' + 404: + description: "No 'referred by' found for this user." + security: + - auth0_jwt: [] + "/consents": + get: + description: "Get all consents for the user." + produces: + - application/json + operationId: "getConsents" + responses: + 200: + description: "List of consents." + schema: + $ref: '#/definitions/ConsentList' + 404: + description: "No consents found for the user." + security: + - auth0_jwt: [] + "/consents/{consent-id}": + put: + description: "Change the value for the specified consent." + operationId: "updateConsent" + responses: + 200: + description: "Successfully updated the consent." + 404: + description: "Consent not found." 
+ security: + - auth0_jwt: [] + parameters: + - name: consent-id + in: path + description: "Id of the consent to be changed" + required: true + type: string + - name: accepted + in: query + description: "Whether user accepted the consent (default = true)" + required: false + type: boolean +definitions: + Profile: + type: object + properties: + name: + type: string + address: + type: string + postCode: + type: string + city: + type: string + country: + type: string + email: + type: string + format: email + referralId: + type: string + required: + - email + SubscriptionList: + type: array + items: + $ref: '#/definitions/Subscription' + Subscription: + type: object + properties: + msisdn: + description: "Mobile number for this subscription" + type: string + BundleList: + type: array + items: + $ref: '#/definitions/Bundle' + Bundle: + type: object + properties: + id: + description: "Bundle ID" + type: string + balance: + description: "Balance units in this bundle" + type: integer + format: int64 + SubscriptionStatus: + type: object + properties: + remaining: + description: "Remaining data" + type: integer + format: int64 + purchaseRecords: + description: "List of Purchases" + type: array + items: + $ref: '#/definitions/PurchaseRecord' + PurchaseRecordList: + type: array + items: + $ref: '#/definitions/PurchaseRecord' + PurchaseRecord: + type: object + properties: + id: + description: "Purchase Record ID" + type: string + msisdn: + description: "Deprecated: The MSISDN for which the purchase was made." + type: string + timestamp: + description: "The time stamp of the purchase" + type: integer + format: int64 + product: + $ref: '#/definitions/Product' + required: + - timestamp + - product + ProductList: + type: array + items: + $ref: '#/definitions/Product' + Product: + type: object + properties: + sku: + description: "A unique Id representing a SKU" + type: string + price: + $ref: '#/definitions/Price' + properties: + type: object + presentation: + type: object + required: + - sku + - price + PaymentSourceList: + type: array + items: + $ref: '#/definitions/PaymentSource' + PaymentSource: + type: object + properties: + id: + description: "The identifier for the source" + type: string + type: + description: "The type of source" + type: string + details: + description: "All information stored with the source" + type: object + additionalProperties: true + required: + - id + - type + ConsentList: + type: array + items: + $ref: '#/definitions/Consent' + Consent: + type: object + properties: + consentId: + description: "The identifier of the consent" + type: string + description: + description: "A description of the consent" + type: string + accepted: + description: "Whether user has accepted the consent or not" + type: boolean + Price: + type: object + properties: + amount: + description: "A positive integer in the smallest currency unit" + type: integer + minimum: 0 + currency: + description: "ISO 4217 currency code (three letter alphabetic code)" + type: string + required: + - amount + - currency + ApplicationToken: + type: object + properties: + token: + description: "Application token" + type: string + applicationID: + description: "Uniquely identifier for the app instance" + type: string + tokenType: + description: "Type of application token (FCM)" + type: string + required: + - token + - applicationID + PseudonymEntity: + type: object + properties: + sourceId: + type: string + pseudonym: + type: string + start: + description: "The start time stamp for this pseudonym" + type: integer + format: 
int64 + end: + description: "The start time stamp for this pseudonym" + type: integer + format: int64 + required: + - sourceId + - pseudonym + - start + - end + Person: + type: object + properties: + name: + type: string + required: + - name + PersonList: + type: array + items: + $ref: '#/definitions/Person' + ActivePseudonyms: + type: object + properties: + current: + $ref: '#/definitions/PseudonymEntity' + next: + $ref: '#/definitions/PseudonymEntity' + required: + - current + - next +securityDefinitions: + auth0_jwt: + authorizationUrl: "https://ostelco.eu.auth0.com/authorize" + flow: "implicit" + type: "oauth2" + x-google-issuer: "https://ostelco.eu.auth0.com/" + x-google-jwks_uri: "https://ostelco.eu.auth0.com/.well-known/jwks.json" + x-google-audiences: "http://google_api" \ No newline at end of file diff --git a/prime/infra/new-prod/metrics-api.yaml b/prime/infra/new-prod/metrics-api.yaml new file mode 100644 index 000000000..c8775db24 --- /dev/null +++ b/prime/infra/new-prod/metrics-api.yaml @@ -0,0 +1,30 @@ +type: google.api.Service + +config_version: 3 + +name: prod-metrics.new.dev.ostelco.org + +title: Prime Metrics Reporter Service gRPC API + +apis: + - name: org.ostelco.prime.metrics.api.OcsgwAnalyticsService + +usage: + rules: + # All methods can be called without an API Key. + - selector: "*" + allow_unregistered_calls: true + +authentication: + providers: + - id: google_service_account + issuer: ci-endpoint-update@pi-ostelco-prod.iam.gserviceaccount.com + jwks_uri: https://www.googleapis.com/robot/v1/metadata/x509/ci-endpoint-update@pi-ostelco-prod.iam.gserviceaccount.com + audiences: > + https://prod-metrics.new.dev.ostelco.org/org.ostelco.prime.metrics.api.OcsgwAnalyticsService, + prod-metrics.new.dev.ostelco.org/org.ostelco.prime.metrics.api.OcsgwAnalyticsService, + prod-metrics.new.dev.ostelco.org + rules: + - selector: "*" + requirements: + - provider_id: google_service_account \ No newline at end of file diff --git a/prime/infra/new-prod/ocs-api.yaml b/prime/infra/new-prod/ocs-api.yaml new file mode 100644 index 000000000..5b2f99f09 --- /dev/null +++ b/prime/infra/new-prod/ocs-api.yaml @@ -0,0 +1,30 @@ +type: google.api.Service + +config_version: 3 + +name: prod-ocs.new.dev.ostelco.org + +title: OCS Service gRPC API + +apis: + - name: org.ostelco.ocs.api.OcsService + +usage: + rules: + # All methods can be called without an API Key. + - selector: "*" + allow_unregistered_calls: true + +authentication: + providers: + - id: google_service_account + issuer: ci-endpoint-update@pi-ostelco-prod.iam.gserviceaccount.com + jwks_uri: https://www.googleapis.com/robot/v1/metadata/x509/ci-endpoint-update@pi-ostelco-prod.iam.gserviceaccount.com + audiences: > + https://prod-ocs.new.dev.ostelco.org/org.ostelco.ocs.api.OcsService, + prod-ocs.new.dev.ostelco.org/org.ostelco.ocs.api.OcsService, + prod-ocs.new.dev.ostelco.org + rules: + - selector: "*" + requirements: + - provider_id: google_service_account \ No newline at end of file diff --git a/prime/infra/new-prod/prime-client-api.yaml b/prime/infra/new-prod/prime-client-api.yaml new file mode 100644 index 000000000..7c705f9ae --- /dev/null +++ b/prime/infra/new-prod/prime-client-api.yaml @@ -0,0 +1,576 @@ +swagger: "2.0" +info: + title: "Ostelco API" + description: "The client API for Panacea." 
+ version: "1.0.0" +host: "prod-api.new.dev.ostelco.org" +x-google-endpoints: + - name: "prod-api.new.dev.ostelco.org" + allowCors: true +schemes: + - "https" +paths: + "/profile": + get: + description: "Get profile for the user (email-id present in the bearer token)." + produces: + - application/json + operationId: "getProfile" + responses: + 200: + description: "Get the profile for this user." + schema: + $ref: '#/definitions/Profile' + 404: + description: "Profile not found." + security: + - auth0_jwt: [] + post: + description: "Create a new profile." + consumes: + - application/json + produces: + - application/json + operationId: "createProfile" + parameters: + - name: profile + in: body + description: The profile to create. + schema: + $ref: '#/definitions/Profile' + - name: referred_by + in: query + description: "Referral ID of user who has invited this user" + type: string + responses: + 201: + description: "Successfully created the profile." + schema: + $ref: '#/definitions/Profile' + security: + - auth0_jwt: [] + put: + description: "Update an existing profile." + consumes: + - application/json + produces: + - application/json + operationId: "updateProfile" + parameters: + - in: body + name: profile + description: The updated profile. + schema: + $ref: '#/definitions/Profile' + responses: + 200: + description: "Successfully updated the profile." + schema: + $ref: '#/definitions/Profile' + 404: + description: "Profile not found." + security: + - auth0_jwt: [] + "/applicationtoken": + post: + description: "Store application token" + consumes: + - application/json + produces: + - application/json + operationId: "storeApplicationToken" + parameters: + - name: applicationToken + in: body + description: application token + schema: + $ref: '#/definitions/ApplicationToken' + responses: + 201: + description: "Successfully stored token." + schema: + $ref: '#/definitions/ApplicationToken' + 404: + description: "User not found." + 507: + description: "Not able to store token." + security: + - auth0_jwt: [] + "/paymentSources": + get: + description: "Get all payment sources for the user." + produces: + - application/json + operationId: "listSources" + responses: + 200: + description: "List of payment sources." + schema: + $ref: '#/definitions/PaymentSourceList' + 404: + description: "No user found." + security: + - auth0_jwt: [] + post: + description: "Add a new payment source for user" + produces: + - application/json + operationId: "createSource" + parameters: + - name: sourceId + in: query + description: "The stripe-id of the source to be added to user" + required: true + type: string + responses: + 201: + description: "Successfully added source to user" + schema: + $ref: '#/definitions/PaymentSource' + 404: + description: "User not found." + security: + - auth0_jwt: [] + put: + description: "Set the source as default for user" + produces: + - application/json + operationId: "setDefaultSource" + parameters: + - name: sourceId + in: query + description: "The stripe-id of the default source" + required: true + type: string + responses: + 200: + description: "Successfully set as default source to user" + schema: + $ref: '#/definitions/PaymentSource' + 404: + description: "User not found." + security: + - auth0_jwt: [] + "/products": + get: + description: "Get all products for the user." + produces: + - application/json + operationId: "getAllProducts" + responses: + 200: + description: "List of products." 
+ schema: + $ref: '#/definitions/ProductList' + 404: + description: "No products found for the user." + security: + - auth0_jwt: [] + "/products/{sku}": + post: + description: "Buy the product specified in sku parameter." + produces: + - application/json + - text/plain + operationId: "buyProductDeprecated" + responses: + 201: + description: "Successfully purchased the product." + schema: + $ref: '#/definitions/Product' + 404: + description: "Product not found." + security: + - auth0_jwt: [] + parameters: + - name: sku + in: path + description: SKU to be purchased + required: true + type: string + "/products/{sku}/purchase": + post: + description: "Buy the product specified in sku parameter." + produces: + - application/json + - text/plain + operationId: "purchaseProduct" + parameters: + - name: sku + in: path + description: "SKU to be purchased" + required: true + type: string + - name: sourceId + in: query + description: "The stripe-id of the source to be used for this purchase (if empty, use default source)" + required: false + type: string + - name: saveCard + in: query + description: "Whether to save this card as a source for this user (default = false)" + required: false + type: boolean + responses: + 201: + description: "Successfully purchased the product." + schema: + $ref: '#/definitions/Product' + 404: + description: "Product not found." + security: + - auth0_jwt: [] + "/purchases": + get: + description: "Get list of all purchases." + produces: + - application/json + - text/plain + operationId: "getPurchaseHistory" + responses: + 200: + description: "List of Purchase Records." + schema: + $ref: '#/definitions/PurchaseRecordList' + 404: + description: "No Purchase Records found for the user." + security: + - auth0_jwt: [] + "/subscriptions": + get: + description: "Get subscription (msisdn) for the user (identified by bearer token)." + produces: + - application/json + operationId: "getSubscriptions" + responses: + 200: + description: "Get subscriptions for this user." + schema: + $ref: '#/definitions/SubscriptionList' + 404: + description: "No subscription found for this user." + security: + - auth0_jwt: [] + "/bundles": + get: + description: "Get bundles (balance) for the user (identified by bearer token)." + produces: + - application/json + operationId: "getBundles" + responses: + 200: + description: "Get bundles for this user." + schema: + $ref: '#/definitions/BundleList' + 404: + description: "No bundle found for this user." + security: + - auth0_jwt: [] + "/subscription/status": + get: + description: "Get subscription status for the user (identified by bearer token)." + produces: + - application/json + operationId: "getSubscriptionStatus" + responses: + 200: + description: "Get the subscription status for this user." + schema: + $ref: '#/definitions/SubscriptionStatus' + 404: + description: "No subscription status found for this user." + security: + - auth0_jwt: [] + "/subscription/activePseudonyms": + get: + description: "Get currently active pseudonyms for the user's msisdn (identified by bearer token)." + produces: + - application/json + operationId: "getActivePseudonyms" + responses: + 200: + description: "Get active pseudonyms for the user's msisdn." + schema: + $ref: '#/definitions/ActivePseudonyms' + 404: + description: "No subscription found for this user." + security: + - auth0_jwt: [] + "/referred": + get: + description: "Get list of people whom the user has referred to." 
+ produces: + - application/json + operationId: "getReferred" + responses: + 200: + description: "List of people whom this person has referred to." + schema: + $ref: '#/definitions/PersonList' + 404: + description: "No referrals found for this user." + security: + - auth0_jwt: [] + "/referred/by": + get: + description: "Get the people who had referred this user." + produces: + - application/json + operationId: "getReferredBy" + responses: + 200: + description: "List of people whom this person has referred to." + schema: + $ref: '#/definitions/Person' + 404: + description: "No 'referred by' found for this user." + security: + - auth0_jwt: [] + "/consents": + get: + description: "Get all consents for the user." + produces: + - application/json + operationId: "getConsents" + responses: + 200: + description: "List of consents." + schema: + $ref: '#/definitions/ConsentList' + 404: + description: "No consents found for the user." + security: + - auth0_jwt: [] + "/consents/{consent-id}": + put: + description: "Change the value for the specified consent." + operationId: "updateConsent" + responses: + 200: + description: "Successfully updated the consent." + 404: + description: "Consent not found." + security: + - auth0_jwt: [] + parameters: + - name: consent-id + in: path + description: "Id of the consent to be changed" + required: true + type: string + - name: accepted + in: query + description: "Whether user accepted the consent (default = true)" + required: false + type: boolean +definitions: + Profile: + type: object + properties: + name: + type: string + address: + type: string + postCode: + type: string + city: + type: string + country: + type: string + email: + type: string + format: email + referralId: + type: string + required: + - email + SubscriptionList: + type: array + items: + $ref: '#/definitions/Subscription' + Subscription: + type: object + properties: + msisdn: + description: "Mobile number for this subscription" + type: string + BundleList: + type: array + items: + $ref: '#/definitions/Bundle' + Bundle: + type: object + properties: + id: + description: "Bundle ID" + type: string + balance: + description: "Balance units in this bundle" + type: integer + format: int64 + SubscriptionStatus: + type: object + properties: + remaining: + description: "Remaining data" + type: integer + format: int64 + purchaseRecords: + description: "List of Purchases" + type: array + items: + $ref: '#/definitions/PurchaseRecord' + PurchaseRecordList: + type: array + items: + $ref: '#/definitions/PurchaseRecord' + PurchaseRecord: + type: object + properties: + id: + description: "Purchase Record ID" + type: string + msisdn: + description: "Deprecated: The MSISDN for which the purchase was made." 
+ type: string + timestamp: + description: "The time stamp of the purchase" + type: integer + format: int64 + product: + $ref: '#/definitions/Product' + required: + - timestamp + - product + ProductList: + type: array + items: + $ref: '#/definitions/Product' + Product: + type: object + properties: + sku: + description: "A unique Id representing a SKU" + type: string + price: + $ref: '#/definitions/Price' + properties: + type: object + presentation: + type: object + required: + - sku + - price + PaymentSourceList: + type: array + items: + $ref: '#/definitions/PaymentSource' + PaymentSource: + type: object + properties: + id: + description: "The identifier for the source" + type: string + type: + description: "The type of source" + type: string + details: + description: "All information stored with the source" + type: object + additionalProperties: true + required: + - id + - type + ConsentList: + type: array + items: + $ref: '#/definitions/Consent' + Consent: + type: object + properties: + consentId: + description: "The identifier of the consent" + type: string + description: + description: "A description of the consent" + type: string + accepted: + description: "Whether user has accepted the consent or not" + type: boolean + Price: + type: object + properties: + amount: + description: "A positive integer in the smallest currency unit" + type: integer + minimum: 0 + currency: + description: "ISO 4217 currency code (three letter alphabetic code)" + type: string + required: + - amount + - currency + ApplicationToken: + type: object + properties: + token: + description: "Application token" + type: string + applicationID: + description: "Uniquely identifier for the app instance" + type: string + tokenType: + description: "Type of application token (FCM)" + type: string + required: + - token + - applicationID + PseudonymEntity: + type: object + properties: + sourceId: + type: string + pseudonym: + type: string + start: + description: "The start time stamp for this pseudonym" + type: integer + format: int64 + end: + description: "The start time stamp for this pseudonym" + type: integer + format: int64 + required: + - sourceId + - pseudonym + - start + - end + Person: + type: object + properties: + name: + type: string + required: + - name + PersonList: + type: array + items: + $ref: '#/definitions/Person' + ActivePseudonyms: + type: object + properties: + current: + $ref: '#/definitions/PseudonymEntity' + next: + $ref: '#/definitions/PseudonymEntity' + required: + - current + - next +securityDefinitions: + auth0_jwt: + authorizationUrl: "https://ostelco.eu.auth0.com/authorize" + flow: "implicit" + type: "oauth2" + x-google-issuer: "https://ostelco.eu.auth0.com/" + x-google-jwks_uri: "https://ostelco.eu.auth0.com/.well-known/jwks.json" + x-google-audiences: "http://google_api" \ No newline at end of file diff --git a/prime/infra/prod/prime-client-api.yaml b/prime/infra/prod/prime-client-api.yaml index 1b4e1b0dd..2312044bf 100644 --- a/prime/infra/prod/prime-client-api.yaml +++ b/prime/infra/prod/prime-client-api.yaml @@ -238,19 +238,34 @@ paths: - auth0_jwt: [] "/subscriptions": get: - description: "Get subscription (msisdn, balance) for the user (identified by bearer token)." + description: "Get subscription (msisdn) for the user (identified by bearer token)." produces: - application/json operationId: "getSubscriptions" responses: 200: - description: "Get the subscription for this user." + description: "Get subscriptions for this user." 
schema: $ref: '#/definitions/SubscriptionList' 404: description: "No subscription found for this user." security: - auth0_jwt: [] + "/bundles": + get: + description: "Get bundles (balance) for the user (identified by bearer token)." + produces: + - application/json + operationId: "getBundles" + responses: + 200: + description: "Get bundles for this user." + schema: + $ref: '#/definitions/BundleList' + 404: + description: "No bundle found for this user." + security: + - auth0_jwt: [] "/subscription/status": get: description: "Get subscription status for the user (identified by bearer token)." @@ -377,8 +392,22 @@ definitions: type: object properties: msisdn: - description: "The MSISDN for which the purchase was made." + description: "Mobile number for this subscription" + type: string + BundleList: + type: array + items: + $ref: '#/definitions/Bundle' + Bundle: + type: object + properties: + id: + description: "Bundle ID" type: string + balance: + description: "Balance units in this bundle" + type: integer + format: int64 SubscriptionStatus: type: object properties: @@ -442,6 +471,16 @@ definitions: id: description: "The identifier for the source" type: string + type: + description: "The type of source" + type: string + details: + description: "All information stored with the source" + type: object + additionalProperties: true + required: + - id + - type ConsentList: type: array items: diff --git a/prime/infra/prod/prime.yaml b/prime/infra/prod/prime.yaml index 725015ddc..56189a91c 100644 --- a/prime/infra/prod/prime.yaml +++ b/prime/infra/prod/prime.yaml @@ -35,6 +35,22 @@ spec: app: prime tier: backend --- +apiVersion: v1 +kind: Service +metadata: + name: pseudonym-server-service + labels: + app: prime + tier: backend +spec: + ports: + - protocol: TCP + port: 80 + targetPort: 8080 + selector: + app: prime + tier: backend +--- apiVersion: extensions/v1beta1 kind: Deployment metadata: @@ -90,11 +106,17 @@ spec: name: api-ostelco-ssl readOnly: true - name: prime - image: gcr.io/pantel-2decb/prime:PRIME_VERSION + image: eu.gcr.io/pantel-2decb/prime:PRIME_VERSION imagePullPolicy: Always env: + - name: NEO4J_HOST + value: neo4j - name: FIREBASE_ROOT_PATH value: v2 + - name: DATA_TRAFFIC_TOPIC + value: data-traffic + - name: PURCHASE_INFO_TOPIC + value: purchase-info - name: STRIPE_API_KEY valueFrom: secretKeyRef: @@ -117,4 +139,4 @@ spec: secretName: api-ostelco-ssl - name: ocs-ostelco-ssl secret: - secretName: ocs-ostelco-ssl \ No newline at end of file + secretName: ocs-ostelco-ssl diff --git a/prime/script/deploy-dev-direct.sh b/prime/script/deploy-dev-direct.sh index f25997bbd..aaef0410f 100755 --- a/prime/script/deploy-dev-direct.sh +++ b/prime/script/deploy-dev-direct.sh @@ -21,8 +21,8 @@ echo TAG=${TAG} gradle prime:clean prime:build -docker build -t gcr.io/${PROJECT_ID}/prime:${TAG} prime -gcloud docker -- push gcr.io/${PROJECT_ID}/prime:${TAG} +docker build -t eu.gcr.io/${PROJECT_ID}/prime:${TAG} prime +docker push eu.gcr.io/${PROJECT_ID}/prime:${TAG} echo "Deploying prime to GKE" diff --git a/prime/script/deploy-direct.sh b/prime/script/deploy-direct.sh index 6568413dd..6ec9cb82c 100755 --- a/prime/script/deploy-direct.sh +++ b/prime/script/deploy-direct.sh @@ -32,8 +32,8 @@ echo TAG=${TAG} gradle prime:clean prime:build -docker build -t gcr.io/${PROJECT_ID}/prime:${TAG} prime -gcloud docker -- push gcr.io/${PROJECT_ID}/prime:${TAG} +docker build -t eu.gcr.io/${PROJECT_ID}/prime:${TAG} prime +docker push eu.gcr.io/${PROJECT_ID}/prime:${TAG} echo "Deploying prime to GKE" diff --git 
a/prime/script/deploy_api.sh b/prime/script/deploy_api.sh new file mode 100644 index 000000000..7ed917c6b --- /dev/null +++ b/prime/script/deploy_api.sh @@ -0,0 +1,59 @@ +#!/bin/bash +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Original: https://github.com/GoogleCloudPlatform/endpoints-quickstart/blob/master/scripts/deploy_api.sh + +set -euo pipefail + +source util.sh + +main() { + # Get our working project, or exit if it's not set. + local project_id=$(get_project_id) + if [[ -z "$project_id" ]]; then + exit 1 + fi + local temp_file=$(mktemp) + export TEMP_FILE="${temp_file}.yaml" + mv "$temp_file" "$TEMP_FILE" + # Because the included API is a template, we have to do some string + # substitution before we can deploy it. Sed does this nicely. + < "$API_FILE" sed -E "s/YOUR-PROJECT-ID/${project_id}/g" > "$TEMP_FILE" + echo "Deploying $API_FILE..." + echo "gcloud endpoints services deploy $API_FILE" + gcloud endpoints services deploy "$TEMP_FILE" +} + +cleanup() { + rm "$TEMP_FILE" +} + +# Defaults. +API_FILE="../openapi/prime-openapi.yaml" + +if [[ "$#" == 0 ]]; then + : # Use defaults. +elif [[ "$#" == 1 ]]; then + API_FILE="$1" +else + echo "Wrong number of arguments specified." + echo "Usage: deploy_api.sh [api-file]" + exit 1 +fi + +# Cleanup our temporary files even if our deployment fails. +trap cleanup EXIT + +main "$@" diff --git a/prime/script/util.sh b/prime/script/util.sh new file mode 100644 index 000000000..d7c653721 --- /dev/null +++ b/prime/script/util.sh @@ -0,0 +1,42 @@ +#!/bin/bash +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Make Bash a little less error-prone. +set -euo pipefail + +get_latest_config_id() { + # Given a service name, this returns the most recent deployment of that + # API. + service_name="$1" + gcloud endpoints configs list \ + --service="$service_name" \ + --sort-by="~config_id" --limit=1 --format="value(CONFIG_ID)" \ + | tr -d '[:space:]' +} + +get_project_id() { + # Find the project ID first by DEVSHELL_PROJECT_ID (in Cloud Shell) + # and then by querying the gcloud default project. + local project="${DEVSHELL_PROJECT_ID:-}" + if [[ -z "$project" ]]; then + project=$(gcloud config get-value project 2> /dev/null) + fi + if [[ -z "$project" ]]; then + >&2 echo "No default project was found, and DEVSHELL_PROJECT_ID is not set." 
+ >&2 echo "Please use the Cloud Shell or set your default project by typing:" + >&2 echo "gcloud config set project YOUR-PROJECT-NAME" + fi + echo "$project" +} diff --git a/prime/src/integration-tests/kotlin/org/ostelco/prime/ocs/OcsTest.kt b/prime/src/integration-tests/kotlin/org/ostelco/prime/ocs/OcsTest.kt index 8627bd178..f7c47c525 100644 --- a/prime/src/integration-tests/kotlin/org/ostelco/prime/ocs/OcsTest.kt +++ b/prime/src/integration-tests/kotlin/org/ostelco/prime/ocs/OcsTest.kt @@ -1,6 +1,5 @@ package org.ostelco.prime.ocs -import com.lmax.disruptor.TimeoutException import com.palantir.docker.compose.DockerComposeRule import com.palantir.docker.compose.connection.waiting.HealthChecks import io.grpc.ManagedChannelBuilder @@ -21,14 +20,15 @@ import org.ostelco.ocs.api.MultipleServiceCreditControl import org.ostelco.ocs.api.OcsServiceGrpc import org.ostelco.ocs.api.OcsServiceGrpc.OcsServiceStub import org.ostelco.ocs.api.ServiceUnit +import org.ostelco.prime.consumption.OcsGrpcServer +import org.ostelco.prime.consumption.OcsService import org.ostelco.prime.disruptor.EventProducerImpl import org.ostelco.prime.disruptor.OcsDisruptor -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger import org.ostelco.prime.storage.firebase.initFirebaseConfigRegistry import org.ostelco.prime.storage.graph.Config import org.ostelco.prime.storage.graph.ConfigRegistry import org.ostelco.prime.storage.graph.Neo4jClient -import java.io.IOException import java.util.concurrent.CountDownLatch import java.util.concurrent.TimeUnit @@ -40,11 +40,11 @@ import java.util.concurrent.TimeUnit */ class OcsTest { - private val logger by logger() + private val logger by getLogger() abstract class AbstractObserver : StreamObserver { - private val logger by logger() + private val logger by getLogger() override fun onError(t: Throwable) { // Ignore errors @@ -75,7 +75,6 @@ class OcsTest { * the gRPC interface. */ @Test - @Throws(InterruptedException::class) fun testFetchDataRequest() { // If this latch reaches zero, then things went well. @@ -115,7 +114,6 @@ class OcsTest { * @throws InterruptedException */ @Test - @Throws(InterruptedException::class) fun testActivateMsisdn() { val cdl = CountDownLatch(2) @@ -142,7 +140,10 @@ class OcsTest { // Send a report using the producer to the pipeline that will // inject a PrimeEvent that will top up the data bundle balance. - producer.topupDataBundleBalanceEvent(BUNDLE_ID, NO_OF_BYTES_TO_ADD) + producer.topupDataBundleBalanceEvent( + requestId = TOPUP_REQ_ID, + bundleId = BUNDLE_ID, + bytes = NO_OF_BYTES_TO_ADD) // Now wait, again, for the latch to reach zero, and fail the test // ff it hasn't. @@ -165,6 +166,8 @@ class OcsTest { private const val BUNDLE_ID = "foo@bar.com" + private const val TOPUP_REQ_ID = "req-id" + // Default chunk of byte used in various test cases private const val BYTES: Long = 100 @@ -219,7 +222,6 @@ class OcsTest { @BeforeClass @JvmStatic - @Throws(IOException::class) fun setUp() { ConfigRegistry.config = Config().apply { this.host = "0.0.0.0" @@ -236,7 +238,7 @@ class OcsTest { // Set up the gRPC server at a particular port with a particular // service, that is connected to the processing pipeline. 
val ocsService = OcsService(producer) - ocsServer = OcsGrpcServer(PORT, ocsService.asOcsServiceImplBase()) + ocsServer = OcsGrpcServer(PORT, ocsService.ocsGrpcService) val ocsState = OcsState() ocsState.msisdnToBundleIdMap[MSISDN] = BUNDLE_ID @@ -248,7 +250,7 @@ class OcsTest { // Producer:(OcsService, Subscriber) // -> Handler:(OcsState) // -> Handler:(OcsService, Subscriber) - disruptor.disruptor.handleEventsWith(ocsState).then(ocsService.asEventHandler()) + disruptor.disruptor.handleEventsWith(ocsState).then(ocsService.eventHandler) // start disruptor and ocs services. disruptor.start() @@ -268,7 +270,6 @@ class OcsTest { @AfterClass @JvmStatic - @Throws(InterruptedException::class, TimeoutException::class) fun tearDown() { disruptor.stop() ocsServer.forceStop() diff --git a/prime/src/integration-tests/kotlin/org/ostelco/prime/storage/firebase/FbConfigSetup.kt b/prime/src/integration-tests/kotlin/org/ostelco/prime/storage/firebase/FbConfigSetup.kt index 8621a4306..be9b92715 100644 --- a/prime/src/integration-tests/kotlin/org/ostelco/prime/storage/firebase/FbConfigSetup.kt +++ b/prime/src/integration-tests/kotlin/org/ostelco/prime/storage/firebase/FbConfigSetup.kt @@ -1,9 +1,7 @@ package org.ostelco.prime.storage.firebase fun initFirebaseConfigRegistry() { - val firebaseConfig = FirebaseConfig() - firebaseConfig.databaseName = "pantel-2decb" - firebaseConfig.configFile = "config/pantel-prod.json" - firebaseConfig.rootPath = "test" - FirebaseConfigRegistry.firebaseConfig = firebaseConfig + FirebaseConfigRegistry.firebaseConfig = FirebaseConfig( + configFile = "config/pantel-prod.json", + rootPath = "test") } \ No newline at end of file diff --git a/prime/src/integration-tests/kotlin/org/ostelco/prime/storage/firebase/FbStorageTest.kt b/prime/src/integration-tests/kotlin/org/ostelco/prime/storage/firebase/FbStorageTest.kt index 1fbf5716c..b0f85e96b 100644 --- a/prime/src/integration-tests/kotlin/org/ostelco/prime/storage/firebase/FbStorageTest.kt +++ b/prime/src/integration-tests/kotlin/org/ostelco/prime/storage/firebase/FbStorageTest.kt @@ -16,7 +16,6 @@ class FbStorageTest { private lateinit var prids: MutableCollection @Before - @Throws(InterruptedException::class) fun setUp() { initFirebaseConfigRegistry() this.storage = FirebaseStorage() diff --git a/prime/src/integration-tests/kotlin/org/ostelco/prime/storage/graph/Neo4jStorageTest.kt b/prime/src/integration-tests/kotlin/org/ostelco/prime/storage/graph/Neo4jStorageTest.kt index 9685a2420..bc960b646 100644 --- a/prime/src/integration-tests/kotlin/org/ostelco/prime/storage/graph/Neo4jStorageTest.kt +++ b/prime/src/integration-tests/kotlin/org/ostelco/prime/storage/graph/Neo4jStorageTest.kt @@ -31,7 +31,6 @@ class Neo4jStorageTest { private lateinit var storage: GraphStore @Before - @Throws(InterruptedException::class) fun setUp() { this.storage = Neo4jStore() @@ -61,7 +60,7 @@ class Neo4jStorageTest { { fail(it.message) }, { bundles -> assertEquals(RANDOM_NO_OF_BYTES_TO_USE_BY_REMAINING_MSISDN_TESTS, - bundles?.first { it.id == EPHERMERAL_EMAIL }?.balance) + bundles.firstOrNull { it.id == EPHERMERAL_EMAIL }?.balance) }) storage.updateBundle(Bundle(EPHERMERAL_EMAIL, 0)) @@ -69,7 +68,7 @@ class Neo4jStorageTest { { fail(it.message) }, { bundles -> assertEquals(0L, - bundles?.first { it.id == EPHERMERAL_EMAIL }?.balance) + bundles.firstOrNull { it.id == EPHERMERAL_EMAIL }?.balance) }) } diff --git a/prime/src/integration-tests/resources/config.yaml b/prime/src/integration-tests/resources/config.yaml index 6e4faf8eb..123175946 100644 --- 
a/prime/src/integration-tests/resources/config.yaml +++ b/prime/src/integration-tests/resources/config.yaml @@ -2,7 +2,6 @@ modules: - type: firebase config: configFile: config/pantel-prod.json - databaseName: pantel-2decb rootPath: test - type: neo4j config: @@ -26,7 +25,6 @@ modules: - type: firebase-app-notifier config: configFile: config/pantel-prod.json - databaseName: pantel-2decb - type: admin server: diff --git a/prime/src/main/kotlin/org/ostelco/prime/PrimeApplication.kt b/prime/src/main/kotlin/org/ostelco/prime/PrimeApplication.kt index e8ca34def..4cb268b29 100644 --- a/prime/src/main/kotlin/org/ostelco/prime/PrimeApplication.kt +++ b/prime/src/main/kotlin/org/ostelco/prime/PrimeApplication.kt @@ -17,7 +17,7 @@ class PrimeApplication : Application() { override fun initialize(bootstrap: Bootstrap) { bootstrap.configurationSourceProvider = SubstitutingSourceProvider( bootstrap.configurationSourceProvider, - EnvironmentVariableSubstitutor()) + EnvironmentVariableSubstitutor(false)) bootstrap.objectMapper.registerModule(KotlinModule()) bootstrap.addBundle(PrometheusBundle()) } diff --git a/prime/src/main/resources/boxfuse.yml b/prime/src/main/resources/boxfuse.yml deleted file mode 100644 index 04aaca9ce..000000000 --- a/prime/src/main/resources/boxfuse.yml +++ /dev/null @@ -1,14 +0,0 @@ -# server: -# applicationConnectors: -# - type: http -# port: 80 -# requestLog: -# appenders: [] -# logging: -# appenders: -# - type: console -# threshold: WARN - -eventProcessor: - databaseName: pantel-test - configFile: pantel-tests-1da9f050eea9.json diff --git a/pseudonym-server/README.md b/pseudonym-server/README.md index 5634def27..94aab5efb 100644 --- a/pseudonym-server/README.md +++ b/pseudonym-server/README.md @@ -1,55 +1,4 @@ -# Pseudonym Server - - #PROJECT_ID=pantel-2decb - export PROJECT_ID="$(gcloud config get-value project -q)" - export PSEUDONYM_VERSION="$(gradle properties -q | grep "version:" | awk '{print $2}' | tr -d '[:space:]')" - - -Build the Docker image (In the folder with Dockerfile) - - docker build -t gcr.io/${PROJECT_ID}/pseudonym-server:${PSEUDONYM_VERSION} . - -Push to the registry - - gcloud docker -- push gcr.io/${PROJECT_ID}/pseudonym-server:${PSEUDONYM_VERSION} - -Update the tag (version) of prime's docker image in `pseudonym-server.yaml`. 
- -Apply the deployment & service - - sed -e "s/PSEUDONYM_VERSION/$PSEUDONYM_VERSION/" pseudonym-server.yaml | kubectl apply -f - - - -Details of the deployment - - kubectl describe deployment pseudonym-server - kubectl get pods - - -Helper Commands - -Create cluster - - gcloud container clusters create private-cluster --scopes=default,bigquery,datastore,pubsub,sql,storage-rw --num-nodes=3 - -Delete cluster - - gcloud container clusters delete private-cluster - -Delete service - - kubectl delete service pseudonym-server-service - -Delete deployment - - kubectl delete deployment pseudonym-server - - -Container to test DNS - - kubectl run curl --image=radial/busyboxplus:curl -i --tty - nslookup pseudonym-server-service - curl pseudonym-server-service.default.svc.cluster.local/pseudonym/current/47333 +# Module Pseudonym Server SQL for joining dataconsumption and pseudonyms table @@ -61,7 +10,7 @@ SQL for joining dataconsumption and pseudonyms table [pantel-2decb:exported_pseudonyms.3ebcdc4a7ecc4cd385e82087e49b7b7b] as ps ON ps.msisdn = hc.msisdn -Login to gcr.io for pushing images +Login to eu.gcr.io for pushing images - docker login -u oauth2accesstoken -p "$(gcloud auth print-access-token)" https://gcr.io + docker login -u oauth2accesstoken -p "$(gcloud auth print-access-token)" https://eu.gcr.io diff --git a/pseudonym-server/build.gradle b/pseudonym-server/build.gradle index 926ea958c..eb1a06383 100644 --- a/pseudonym-server/build.gradle +++ b/pseudonym-server/build.gradle @@ -1,5 +1,5 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "java-library" } @@ -7,15 +7,15 @@ dependencies { implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion" implementation project(':ocs-grpc-api') - implementation project(':prime-api') + implementation project(':prime-modules') implementation project(':model') implementation project(':analytics-grpc-api') implementation "io.dropwizard:dropwizard-client:$dropwizardVersion" implementation "com.google.guava:guava:$guavaVersion" // Match with grpc-netty-shaded via PubSub - // removing io.grpc:grpc-netty-shaded:1.14.0 causes ALPN error - implementation 'io.grpc:grpc-netty-shaded:1.14.0' + // removing io.grpc:grpc-netty-shaded causes ALPN error + implementation "io.grpc:grpc-netty-shaded:$grpcVersion" implementation "com.google.cloud:google-cloud-bigquery:$googleCloudVersion" implementation "com.google.cloud:google-cloud-datastore:$googleCloudVersion" implementation "com.google.cloud:google-cloud-pubsub:$googleCloudVersion" diff --git a/pseudonym-server/cloudbuild.yaml b/pseudonym-server/cloudbuild.yaml deleted file mode 100644 index 547fdec77..000000000 --- a/pseudonym-server/cloudbuild.yaml +++ /dev/null @@ -1,24 +0,0 @@ -steps: -# Check if branch is master [using same script as prime] - - name: gcr.io/$PROJECT_ID/ubuntu-git - args: ['bash', 'pseudonym-server/script/check_repo.sh', 'pseudonym-server', '$TAG_NAME', '$BRANCH_NAME'] -# Gradle clean and build pseudonym-server & its dependencies only - - name: gcr.io/cloud-builders/gradle - args: ['clean', 'pseudonym-server:build'] - timeout: 600s -# Build docker images - - name: gcr.io/cloud-builders/docker - args: ['build', '--tag=gcr.io/$PROJECT_ID/pseudonym-server:$TAG_NAME', 'pseudonym-server'] - timeout: 120s -# Deploy new docker image to Google Kubernetes Engine (GKE) - - name: ubuntu - args: ['sed', '-i', 's/PSEUDONYM_VERSION/$TAG_NAME/g', 'pseudonym-server/pseudonym-server.yaml'] - - name: gcr.io/cloud-builders/kubectl - args: 
['apply', '-f', 'pseudonym-server/pseudonym-server.yaml'] - env: - - 'CLOUDSDK_COMPUTE_ZONE=europe-west1-b' - - 'CLOUDSDK_CONTAINER_CLUSTER=private-cluster' - timeout: 300s - -# Upload docker image into Google Container Registry (GCR) -images: ['gcr.io/$PROJECT_ID/pseudonym-server:$TAG_NAME'] diff --git a/pseudonym-server/pseudonym-server-service.yaml b/pseudonym-server/pseudonym-server-service.yaml deleted file mode 100644 index 69404d4c8..000000000 --- a/pseudonym-server/pseudonym-server-service.yaml +++ /dev/null @@ -1,18 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: pseudonym-server-service - labels: - app: pseudonym - tier: backend -spec: - # if your cluster supports it, uncomment the following to automatically create - # an external load-balanced IP for the frontend service. - # type: LoadBalancer - ports: - - protocol: TCP - port: 80 - targetPort: 8080 - selector: - app: pseudonym - tier: backend diff --git a/pseudonym-server/pseudonym-server.yaml b/pseudonym-server/pseudonym-server.yaml deleted file mode 100644 index f31e72d09..000000000 --- a/pseudonym-server/pseudonym-server.yaml +++ /dev/null @@ -1,20 +0,0 @@ -apiVersion: extensions/v1beta1 -kind: Deployment -metadata: - name: pseudonym-server - labels: - app: pseudonym - tier: backend -spec: - replicas: 2 - template: - metadata: - labels: - app: pseudonym - tier: backend - spec: - containers: - - name: pseudonym-server - image: gcr.io/pantel-2decb/pseudonym-server:PSEUDONYM_VERSION - ports: - - containerPort: 8080 diff --git a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/PseudonymModule.kt b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/PseudonymModule.kt index 2a79b6e80..e4849d0fa 100644 --- a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/PseudonymModule.kt +++ b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/PseudonymModule.kt @@ -31,4 +31,5 @@ object ConfigRegistry { */ class PseudonymServerConfig : Configuration() { var datastoreType = "default" + var namespace = "" } \ No newline at end of file diff --git a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymExport.kt b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymExport.kt index 242c7d0dd..8ed1adea1 100644 --- a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymExport.kt +++ b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymExport.kt @@ -16,20 +16,22 @@ import com.google.cloud.datastore.Query import com.google.cloud.datastore.StructuredQuery import com.google.common.cache.Cache import com.google.common.cache.CacheBuilder -import org.ostelco.pseudonym.ExportTaskKind -import org.ostelco.pseudonym.MsisdnPseudonymEntityKind -import org.ostelco.pseudonym.errorPropertyName -import org.ostelco.pseudonym.exportIdPropertyName -import org.ostelco.pseudonym.msisdnPropertyName -import org.ostelco.pseudonym.pseudonymPropertyName -import org.ostelco.pseudonym.statusPropertyName +import org.apache.commons.codec.binary.Hex +import org.ostelco.prime.model.Subscriber +import org.ostelco.prime.model.Subscription +import org.ostelco.prime.module.getResource +import org.ostelco.prime.storage.AdminDataSource +import org.ostelco.pseudonym.* import org.slf4j.LoggerFactory -import java.util.* +import java.net.URLEncoder +import java.security.MessageDigest +import java.util.UUID private const val datasetName = "exported_pseudonyms" -private const val msisdnFieldName = "msisdn" -private const val pseudonymFiledName = "pseudonym" -private const val idFieldName 
= "msisdnid" +private const val consumptionDatasetName = "exported_data_consumption" + +private const val idFieldName = "pseudoid" +private const val msisdnIdPropertyName = "msisdnId" /** * Exports pseudonym objects to a bigquery Table @@ -44,92 +46,50 @@ class PseudonymExport(private val exportId: String, private val bigquery: BigQue INITIAL, RUNNING, FINISHED, ERROR } - private val tableName: String = exportId.replace("-", "") - private val idCache: Cache = CacheBuilder.newBuilder() - .maximumSize(5000) - .build() private var status = Status.INITIAL private var error: String = "" + private val randomKey = "$exportId-${UUID.randomUUID()}" + private val msisdnExporter: DS2BQExporter = DS2BQExporter( + tableName = tableName("msisdn"), + sourceEntity = MsisdnPseudonymEntityKind, + sourceField = msisdnPropertyName, + datasetName = datasetName, + randomKey = randomKey, + datastore = datastore, + bigquery = bigquery) + private val subscriberIdExporter: DS2BQExporter = DS2BQExporter( + tableName = tableName("subscriber"), + sourceEntity = SubscriberIdPseudonymEntityKind, + sourceField = subscriberIdPropertyName, + datasetName = datasetName, + randomKey = randomKey, + datastore = datastore, + bigquery = bigquery) + private val msisdnMappingExporter: SubscriberMsisdnMappingExporter = SubscriberMsisdnMappingExporter( + tableName = tableName("sub2msisdn"), + msisdnExporter = msisdnExporter, + subscriberIdExporter = subscriberIdExporter, + datasetName = consumptionDatasetName, + bigquery = bigquery) init { upsertTaskStatus() } - private fun createTable(): Table { - // Delete existing table - val deleted = bigquery.delete(datasetName, tableName) - if (deleted) { - logger.info("Existing table deleted.") - } - val tableId = TableId.of(datasetName, tableName) - // Table field definition - val id = Field.of(idFieldName, LegacySQLTypeName.STRING) - val pseudonym = Field.of(pseudonymFiledName, LegacySQLTypeName.STRING) - val msisdn = Field.of(msisdnFieldName, LegacySQLTypeName.STRING) - // Table schema definition - val schema = Schema.of(id, pseudonym, msisdn) - val tableDefinition = StandardTableDefinition.of(schema) - val tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build() - return bigquery.create(tableInfo) - } - - private fun getIdForMsisdn(msisdn: String): String { - // Retrieves the element from cache. - // Incase of cache miss, generate a new UUID - return idCache.get(msisdn) { UUID.randomUUID().toString() } - } - - private fun createTablePage(pageSize: Int, cursor: Cursor?, table: Table): Cursor? { - // Dump pseudonyms to BQ, one page at a time. 
Since all records in a - // page are inserted at once, use a small page size - val queryBuilder = Query.newEntityQueryBuilder() - .setKind(MsisdnPseudonymEntityKind) - .setOrderBy(StructuredQuery.OrderBy.asc(msisdnPropertyName)) - .setLimit(pageSize) - if (cursor != null) { - queryBuilder.setStartCursor(cursor) - } - val rows = ArrayList() - val pseudonyms = datastore.run(queryBuilder.build()) - var totalPseudonyms = 0 - while (pseudonyms.hasNext()) { - val entity = pseudonyms.next() - totalPseudonyms++ - val row = hashMapOf( - msisdnFieldName to entity.getString(msisdnPropertyName), - pseudonymFiledName to entity.getString(pseudonymPropertyName), - idFieldName to getIdForMsisdn(entity.getString(msisdnPropertyName))) - val rowId = "rowId$totalPseudonyms" - rows.add(RowToInsert.of(rowId, row)) - } - if (totalPseudonyms != 0) { - val response = table.insert(rows, true, true) - if (response.hasErrors()) { - logger.error("Failed to insert Records", response.insertErrors) - error = "$error${response.insertErrors}\n" - } - } - return if (totalPseudonyms < pageSize) { - null - } else { - pseudonyms.cursorAfter - } - } + private fun tableName(suffix: String) = "${exportId.replace("-", "")}_$suffix" private fun start() { logger.info("Starting to export Pseudonyms for $exportId") status = Status.RUNNING upsertTaskStatus() - val table = createTable() - var cursor: Cursor? = null - do { - cursor = createTablePage(100, cursor, table) - } while (cursor != null) + msisdnExporter.doExport() + subscriberIdExporter.doExport() + msisdnMappingExporter.doExport() if (status == Status.RUNNING) { status = Status.FINISHED upsertTaskStatus() } - logger.info("Exported Pseudonyms for $exportId") + logger.info("Exported msisdn and subscriber pseudonyms for $exportId") } /** @@ -169,4 +129,182 @@ class PseudonymExport(private val exportId: String, private val bigquery: BigQue } } -} \ No newline at end of file + +} + +/** + * Class for exporting Datastore tables to BigQuery. + */ +class DS2BQExporter( + tableName: String, + private val sourceEntity: String, + private val sourceField: String, + datasetName: String, + private val randomKey: String, + private val datastore: Datastore, + bigquery: BigQuery): BQExporter(tableName, randomKey, datasetName, bigquery) { + + override val logger = LoggerFactory.getLogger(DS2BQExporter::class.java) + private val idCache: Cache = CacheBuilder.newBuilder() + .maximumSize(5000) + .build() + private val digest = MessageDigest.getInstance("SHA-256") + + override fun getSchema(): Schema { + val id = Field.of(idFieldName, LegacySQLTypeName.STRING) + val pseudonym = Field.of(pseudonymPropertyName, LegacySQLTypeName.STRING) + val source = Field.of(sourceField, LegacySQLTypeName.STRING) + return Schema.of(id, pseudonym, source) + } + + fun getIdForKey(key: String): String { + // Retrieves the element from cache. + // Incase of cache miss, generate a new SHA + return idCache.get(key) { + val keyString: String = "$randomKey-$key" + val hash = digest.digest(keyString.toByteArray(Charsets.UTF_8)) + String(Hex.encodeHex(hash)) + } + } + + fun exportPage(pageSize: Int, cursor: Cursor?, table: Table): Cursor? { + // Dump pseudonyms to BQ, one page at a time. 
Since all records in a + // page are inserted at once, use a small page size + val queryBuilder = Query.newEntityQueryBuilder() + .setKind(sourceEntity) + .setOrderBy(StructuredQuery.OrderBy.asc(sourceField)) + .setLimit(pageSize) + if (cursor != null) { + queryBuilder.setStartCursor(cursor) + } + val rows = ArrayList() + val pseudonyms = datastore.run(queryBuilder.build()) + while (pseudonyms.hasNext()) { + val entity = pseudonyms.next() + totalRows++ + val row = hashMapOf( + sourceField to entity.getString(sourceField), + pseudonymPropertyName to entity.getString(pseudonymPropertyName), + idFieldName to getIdForKey(entity.getString(sourceField))) + val rowId = "rowId$totalRows" + rows.add(RowToInsert.of(rowId, row)) + } + insertToBq(table, rows) + return if (rows.size < pageSize) { + null + } else { + pseudonyms.cursorAfter + } + } + + /** + * Export the Datastore table to BQ. + * This is done in pages of 100 records. + */ + override fun doExport() { + logger.info("Starting export to ${tableName}") + val table = createTable() + var cursor: Cursor? = null + do { + cursor = exportPage(100, cursor, table) + } while (cursor != null) + logger.info("Exported ${totalRows} rows to ${tableName}") + } +} + + +/** + * Class for exporting Subscriber -> Msisidn mapping. + */ +class SubscriberMsisdnMappingExporter( + tableName: String, + private val msisdnExporter: DS2BQExporter, + private val subscriberIdExporter: DS2BQExporter, + datasetName: String, + bigquery: BigQuery) : + BQExporter(tableName, "", datasetName, bigquery) { + + private val storage by lazy { getResource() } + override val logger = LoggerFactory.getLogger(SubscriberMsisdnMappingExporter::class.java) + + override fun getSchema(): Schema { + val subscriberId = Field.of(subscriberIdPropertyName, LegacySQLTypeName.STRING) + val msisdnId = Field.of(msisdnIdPropertyName, LegacySQLTypeName.STRING) + return Schema.of(subscriberId, msisdnId) + } + + private fun exportAllPages(table: Table, pageSize: Int) { + // Dump pseudonyms to BQ, one page at a time. Since all records in a + // page are inserted at once, use a small page size + val map: Map = storage.getSubscriberToMsisdnMap() + var rows = ArrayList() + for ((subscriber, subscription) in map) { + val encodedSubscriberId = URLEncoder.encode(subscriber.email, "UTF-8") + totalRows++ + val row = hashMapOf( + msisdnIdPropertyName to msisdnExporter.getIdForKey(subscription.msisdn), + subscriberIdPropertyName to subscriberIdExporter.getIdForKey(encodedSubscriberId)) + val rowId = "rowId$totalRows" + rows.add(RowToInsert.of(rowId, row)) + if (rows.size == pageSize) { + // Insert current page to BQ + insertToBq(table, rows) + // Reset rows array. + rows = ArrayList() + } + } + // Insert remaining rows to BQ + insertToBq(table, rows) + } + + /** + * Export all subscription mapping to BQ. + * This is done in pages of 100 records. + */ + override fun doExport() { + logger.info("Starting export to ${tableName}") + val table = createTable() + exportAllPages(table, 100) + logger.info("Exported ${totalRows} rows to ${tableName}") + } +} + +/** + * Class for exporting Subscriber -> Msisidn mapping. 
+ */ +abstract class BQExporter( + val tableName: String, + private val randomKey: String, + private val datasetName: String, + private val bigquery: BigQuery) { + + open val logger = LoggerFactory.getLogger(BQExporter::class.java) + var error: String = "" + var totalRows = 0 + + fun createTable(): Table { + // Delete existing table + val deleted = bigquery.delete(datasetName, tableName) + if (deleted) { + logger.info("Existing table '$tableName' deleted.") + } + val tableId = TableId.of(datasetName, tableName) + val schema = getSchema() + val tableDefinition = StandardTableDefinition.of(schema) + val tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build() + return bigquery.create(tableInfo) + } + + fun insertToBq(table: Table, rows: ArrayList) { + if (rows.size != 0) { + val response = table.insert(rows, true, true) + if (response.hasErrors()) { + logger.error("Failed to insert Records to '$tableName'", response.insertErrors) + error = "$error${response.insertErrors}\n" + } + } + } + + abstract fun getSchema(): Schema + abstract fun doExport() +} diff --git a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt index e82932c4c..54c148987 100644 --- a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt +++ b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt @@ -1,20 +1,17 @@ package org.ostelco.pseudonym.service import com.codahale.metrics.health.HealthCheck +import com.google.cloud.NoCredentials import com.google.cloud.bigquery.BigQuery import com.google.cloud.bigquery.BigQueryOptions -import com.google.cloud.datastore.Datastore -import com.google.cloud.datastore.DatastoreOptions -import com.google.cloud.datastore.Entity -import com.google.cloud.datastore.Key -import com.google.cloud.datastore.Query +import com.google.cloud.datastore.* import com.google.cloud.datastore.StructuredQuery.PropertyFilter import com.google.cloud.datastore.testing.LocalDatastoreHelper import com.google.cloud.http.HttpTransportOptions import com.google.common.cache.Cache import com.google.common.cache.CacheBuilder import io.dropwizard.setup.Environment -import org.ostelco.prime.logger +import org.ostelco.prime.getLogger import org.ostelco.prime.model.ActivePseudonyms import org.ostelco.prime.model.PseudonymEntity import org.ostelco.prime.pseudonymizer.PseudonymizerService @@ -52,7 +49,7 @@ interface DateBounds { object PseudonymizerServiceSingleton : PseudonymizerService { - private val logger by logger() + private val logger by getLogger() private lateinit var datastore: Datastore private var bigQuery: BigQuery? 
= null @@ -62,10 +59,10 @@ object PseudonymizerServiceSingleton : PseudonymizerService { private val subscriberIdPseudonymiser: Pseudonymizer = Pseudonymizer(SubscriberIdPseudonymEntityKind, subscriberIdPropertyName) private val executor = Executors.newFixedThreadPool(3) - val msisdnPseudonymCache: Cache = CacheBuilder.newBuilder() + private val msisdnPseudonymCache: Cache = CacheBuilder.newBuilder() .maximumSize(5000) .build() - val subscriberIdPseudonymCache: Cache = CacheBuilder.newBuilder() + private val subscriberIdPseudonymCache: Cache = CacheBuilder.newBuilder() .maximumSize(5000) .build() @@ -143,12 +140,16 @@ object PseudonymizerServiceSingleton : PseudonymizerService { DatastoreOptions .newBuilder() .setHost("localhost:9090") + .setCredentials(NoCredentials.getInstance()) .setTransportOptions(HttpTransportOptions.newBuilder().build()) .build() } else -> { logger.info("Created default instance of datastore client") - DatastoreOptions.getDefaultInstance() + DatastoreOptions + .newBuilder() + .setNamespace(ConfigRegistry.config.namespace) + .build() } }.service @@ -191,7 +192,7 @@ object PseudonymizerServiceSingleton : PseudonymizerService { class Pseudonymizer(val entityKind: String, val sourcePropertyName: String) { - private val logger by logger() + private val logger by getLogger() private lateinit var datastore: Datastore private var bigQuery: BigQuery? = null private lateinit var dateBounds: DateBounds diff --git a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/utils/DateUtils.kt b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/utils/DateUtils.kt index 0c201f843..7c3fbdf7f 100644 --- a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/utils/DateUtils.kt +++ b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/utils/DateUtils.kt @@ -10,7 +10,7 @@ import java.util.* */ class WeeklyBounds : DateBounds { private val timeZone = TimeZone.getTimeZone("UTC") - private val locale = java.util.Locale.UK + private val locale = java.util.Locale.UK /** * Returns the boundaries for the week of the given timestamp. */ diff --git a/scripts/distribute-pantel-secrets.sh b/scripts/distribute-pantel-secrets.sh new file mode 100755 index 000000000..a69d5b0ee --- /dev/null +++ b/scripts/distribute-pantel-secrets.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +# This script finds directories where pantel-prod.json is gitignored in and copies +# the PANTEL_SECRETS_FILE environment variable into these directories. +# These files are needed for the docker compose acceptance tests. + +#### sanity check +if [ -z "${PANTEL_SECRETS_FILE}" ] ; then + echo "ERROR: PANTEL_SECRETS_FILE env var is empty. Aborting!" + exit 1 +fi +#### + +echo; echo "======> Creating pantel-prod.json file, using the env variable PANTEL_SECRETS_FILE" +for LOCATION in $(find . -name .gitignore -exec grep pantel-prod.json '{}' '+' ); do + DIR_NAME=$(dirname $LOCATION) + echo "Creating secrets file: ${DIR_NAME}/pantel-prod.json ..." + echo ${PANTEL_SECRETS_FILE} | base64 -d > ${DIR_NAME}/pantel-prod.json + ls -l ${DIR_NAME}/pantel-prod.json +done +echo '' diff --git a/scripts/generate-codacy-coverage.sh b/scripts/generate-codacy-coverage.sh new file mode 100755 index 000000000..e5a29d34d --- /dev/null +++ b/scripts/generate-codacy-coverage.sh @@ -0,0 +1,42 @@ +#!/bin/bash + +# This script finds all instances of jacocoTestReport.xml and generates a code coverage report +# and uploads it to Codacy wherever that file is found. 
This script needs a set of environment +# variables which are defined in the pipeline config (.circleci/config.yml). + +#### sanity checks +if [ -z "${CODACY_JAR_FILE}" ]; then + echo "ERROR: CODACY_JAR_FILE is not set in the environment! Aborting!" + exit 1 +fi + +if [ -z "${CODACY_MODULE}" ]; then + echo "ERROR: CODACY_MODULE is not set in the environment! Aborting!" + exit 1 +fi +#### + +REPORT_TARGETS=$(find . -name jacocoTestReport.xml) + +if [ -n "${REPORT_TARGETS}" ]; then + echo "Found 'jacocoTestReport.xml' file under 'build' directories in the following modules," + echo ", implying - tests were run for them." + echo "${REPORT_TARGETS}" ; echo +else + echo "There were no 'build' directories found under each module." + echo "This means tests were not run in the previous build job." +fi + +for REPORT_TARGET in ${REPORT_TARGETS}; do + echo "======> Processing code-coverage report for ======> ${REPORT_TARGET} <======" + java -cp ~/${CODACY_JAR_FILE} ${CODACY_MODULE} report -l Java -r ${REPORT_TARGET} --partial +done + +if [ -n "${REPORT_TARGETS}" ]; then + echo "======> Uploading final code-coverage report to CODACY website. <======" + java -cp ~/${CODACY_JAR_FILE} ${CODACY_MODULE} final +else + echo "There were no 'jacocoTestReport.xml' files found under 'build' directories in each module." + echo "This means tests were not run in the previous build job." + echo "... so, not uploading any code-coverage reports to CODACY website. " +fi diff --git a/scripts/generate-selfsigned-ssl-certs.sh b/scripts/generate-selfsigned-ssl-certs.sh new file mode 100755 index 000000000..67e1e2a0c --- /dev/null +++ b/scripts/generate-selfsigned-ssl-certs.sh @@ -0,0 +1,40 @@ +#!/bin/bash + +# This script generates a slefsigned SSL certificate for a given input domain. + +#### input +DOMAIN_NAME=$1 +#### + +#### sanity check +if [ -z "${DOMAIN_NAME}" ]; then + echo "ERROR: No domain-name was provided in input. Aborting!" + exit 1 +fi +#### + +# Real path is not on every linux distribution. + +# SCRIPT_REAL_PATH=$(dirname $(realpath $0)) +SCRIPT_REAL_PATH="$( cd "$(dirname "$0")" ; pwd -P )" + +pushd ${SCRIPT_REAL_PATH} + +CERTS_DIR=../certs/${DOMAIN_NAME} + +if [ -d ${CERTS_DIR} ]; then + echo "Found the matching domain in certs. Generating SSL certs for domain ${DOMAIN_NAME} in ${CERTS_DIR} ..." + + openssl req -x509 -nodes -days 365 -newkey rsa:2048 \ + -keyout ${CERTS_DIR}/nginx.key \ + -out ${CERTS_DIR}/nginx.crt \ + -subj "/CN=${DOMAIN_NAME}" + + echo "Here are the generated certs in ${CERTS_DIR} ..." 
+ ls -l ${CERTS_DIR} + +else + echo "Could not find a matching domain name in certs for ${DOMAIN_NAME}" +fi + +popd diff --git a/settings.gradle b/settings.gradle index a7ee39d4c..50b942e94 100644 --- a/settings.gradle +++ b/settings.gradle @@ -13,6 +13,7 @@ include ':diameter-stack' include ':diameter-test' include ':ext-auth-provider' include ':firebase-store' +include ':firebase-extensions' include ':model' include ':neo4j-admin-tools' include ':neo4j-store' @@ -22,7 +23,7 @@ include ':ocsgw' include ':ostelco-lib' include ':payment-processor' include ':prime' -include ':prime-api' +include ':prime-modules' include ':prime-client-api' include ':pseudonym-server' @@ -40,6 +41,7 @@ project(':diameter-stack').projectDir = "$rootDir/diameter-stack" as File project(':diameter-test').projectDir = "$rootDir/diameter-test" as File project(':ext-auth-provider').projectDir = "$rootDir/ext-auth-provider" as File project(':firebase-store').projectDir = "$rootDir/firebase-store" as File +project(':firebase-extensions').projectDir = "$rootDir/firebase-extensions" as File project(':model').projectDir = "$rootDir/model" as File project(':neo4j-admin-tools').projectDir = "$rootDir/tools/neo4j-admin-tools" as File project(':neo4j-store').projectDir = "$rootDir/neo4j-store" as File @@ -49,6 +51,6 @@ project(':ocsgw').projectDir = "$rootDir/ocsgw" as File project(':ostelco-lib').projectDir = "$rootDir/ostelco-lib" as File project(':payment-processor').projectDir = "$rootDir/payment-processor" as File project(':prime').projectDir = "$rootDir/prime" as File -project(':prime-api').projectDir = "$rootDir/prime-api" as File +project(':prime-modules').projectDir = "$rootDir/prime-modules" as File project(':prime-client-api').projectDir = "$rootDir/prime-client-api" as File project(':pseudonym-server').projectDir = "$rootDir/pseudonym-server" as File diff --git a/tools/neo4j-admin-tools/build.gradle b/tools/neo4j-admin-tools/build.gradle index 362e4206b..017a5f34c 100644 --- a/tools/neo4j-admin-tools/build.gradle +++ b/tools/neo4j-admin-tools/build.gradle @@ -1,23 +1,23 @@ plugins { - id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "org.jetbrains.kotlin.jvm" version "1.2.70" id "application" id "com.github.johnrengelman.shadow" version "2.0.4" id "idea" } -ext.neo4jDriverVersion="1.6.1" +ext.neo4jDriverVersion="1.6.3" dependencies { implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion" - implementation project(":prime-api") + implementation project(":prime-modules") implementation project(":firebase-store") implementation "org.neo4j.driver:neo4j-java-driver:$neo4jDriverVersion" } shadowJar { - mainClassName = 'org.ostelco.tools.migration.MainKt' + mainClassName = 'org.ostelco.tools.migration.MainKtKt' mergeServiceFiles() classifier = "uber" version = null diff --git a/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/FirebaseExporter.kt b/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/FirebaseExporter.kt index eb2ae96f7..fb0bae674 100644 --- a/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/FirebaseExporter.kt +++ b/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/FirebaseExporter.kt @@ -8,11 +8,9 @@ import org.ostelco.prime.storage.firebase.FirebaseConfig import org.ostelco.prime.storage.firebase.FirebaseConfigRegistry fun initFirebase() { - val config = FirebaseConfig() - config.configFile = "../../prime/config/pantel-prod.json" - config.databaseName = "pantel-2decb" - config.rootPath = "v2" - 
FirebaseConfigRegistry.firebaseConfig = config + FirebaseConfigRegistry.firebaseConfig = FirebaseConfig( + configFile = "../../prime/config/pantel-prod.json", + rootPath = "v2") } // Code moved here from FirebaseStorageSingleton diff --git a/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/Neo4jExporter.kt b/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/Neo4jExporter.kt index 914ff70f0..665aa8993 100644 --- a/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/Neo4jExporter.kt +++ b/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/Neo4jExporter.kt @@ -12,7 +12,7 @@ fun importFromNeo4j(txn: Transaction, handleCypher: (String) -> Unit) { val node = record["n"].asNode() val labels = node.labels().joinToString(separator = "", prefix = ":") - val props = node.asMap().map { entry -> + val props = node.asMap().toSortedMap().map { entry -> "`${entry.key}`: '${entry.value}'" }.joinToString(separator = ",\n") @@ -29,7 +29,7 @@ fun importFromNeo4j(txn: Transaction, handleCypher: (String) -> Unit) { val type = relation.type() - var props = relation.asMap().map { entry -> + var props = relation.asMap().toSortedMap().map { entry -> "`${entry.key}`: '${entry.value}'" }.joinToString(separator = ",\n") diff --git a/tools/neo4j-admin-tools/src/main/resources/init.cypher b/tools/neo4j-admin-tools/src/main/resources/init.cypher index ff2a6e415..d6bbbf15d 100644 --- a/tools/neo4j-admin-tools/src/main/resources/init.cypher +++ b/tools/neo4j-admin-tools/src/main/resources/init.cypher @@ -1,74 +1,82 @@ // Create product -CREATE(node:Product {id: '1GB_249NOK', - `sku`: '1GB_249NOK', - `price/amount`: '24900', - `price/currency`: 'NOK', - `properties/noOfBytes`: '1_000_000_000', - `presentation/isDefault`: 'true', - `presentation/offerLabel`: 'Default Offer', - `presentation/priceLabel`: '249 NOK', - `presentation/productLabel`: '+1GB'}); +CREATE (:Product {`id`: '1GB_249NOK', + `presentation/isDefault`: 'true', + `presentation/offerLabel`: 'Default Offer', + `presentation/priceLabel`: '249 NOK', + `presentation/productLabel`: '+1GB', + `price/amount`: '24900', + `price/currency`: 'NOK', + `properties/noOfBytes`: '1_000_000_000', + `sku`: '1GB_249NOK'}); -CREATE(node:Product {id: '2GB_299NOK', - `sku`: '2GB_299NOK', - `price/amount`: '29900', - `price/currency`: 'NOK', - `properties/noOfBytes`: '2_000_000_000', - `presentation/offerLabel`: 'Monday Special', - `presentation/priceLabel`: '299 NOK', - `presentation/productLabel`: '+2GB'}); +CREATE (:Product {`id`: '2GB_299NOK', + `presentation/offerLabel`: 'Monday Special', + `presentation/priceLabel`: '299 NOK', + `presentation/productLabel`: '+2GB', + `price/amount`: '29900', + `price/currency`: 'NOK', + `properties/noOfBytes`: '2_000_000_000', + `sku`: '2GB_299NOK'}); -CREATE(node:Product {id: '3GB_349NOK', - `sku`: '3GB_349NOK', - `price/amount`: '34900', - `price/currency`: 'NOK', - `properties/noOfBytes`: '3_000_000_000', - `presentation/offerLabel`: 'Monday Special', - `presentation/priceLabel`: '349 NOK', - `presentation/productLabel`: '+3GB'}); +CREATE (:Product {`id`: '3GB_349NOK', + `presentation/offerLabel`: 'Monday Special', + `presentation/priceLabel`: '349 NOK', + `presentation/productLabel`: '+3GB', + `price/amount`: '34900', + `price/currency`: 'NOK', + `properties/noOfBytes`: '3_000_000_000', + `sku`: '3GB_349NOK'}); -CREATE(node:Product {id: '5GB_399NOK', - `sku`: '5GB_399NOK', - `price/amount`: '39900', - `price/currency`: 'NOK', - `properties/noOfBytes`: '5_000_000_000', - 
`presentation/offerLabel`: 'Weekend Special', - `presentation/priceLabel`: '399 NOK', - `presentation/productLabel`: '+5GB'}); +CREATE (:Product {`id`: '5GB_399NOK', + `presentation/offerLabel`: 'Weekend Special', + `presentation/priceLabel`: '399 NOK', + `presentation/productLabel`: '+5GB', + `price/amount`: '39900', + `price/currency`: 'NOK', + `properties/noOfBytes`: '5_000_000_000', + `sku`: '5GB_399NOK'}); -CREATE(node:Product {id: '100MB_FREE_ON_JOINING', - `sku`: '100MB_FREE_ON_JOINING', - `price/amount`: '0', - `price/currency`: 'NOK', - `properties/noOfBytes`: '100_000_000', - `presentation/priceLabel`: 'Free', - `presentation/productLabel`: '100MB Welcome Pack'}); +CREATE (:Product {`id`: '100MB_FREE_ON_JOINING', + `presentation/priceLabel`: 'Free', + `presentation/productLabel`: '100MB Welcome Pack', + `price/amount`: '0', + `price/currency`: 'NOK', + `properties/noOfBytes`: '100_000_000', + `sku`: '100MB_FREE_ON_JOINING'}); -CREATE(node:Product {id: '1GB_FREE_ON_REFERRED', - `sku`: '1GB_FREE_ON_REFERRED', - `price/amount`: '0', - `price/currency`: 'NOK', - `properties/noOfBytes`: '1_000_000_000', - `presentation/priceLabel`: 'Free', - `presentation/productLabel`: '1GB Referral Pack'}); +CREATE (:Product {`id`: '1GB_FREE_ON_REFERRED', + `presentation/priceLabel`: 'Free', + `presentation/productLabel`: '1GB Referral Pack', + `price/amount`: '0', + `price/currency`: 'NOK', + `properties/noOfBytes`: '1_000_000_000', + `sku`: '1GB_FREE_ON_REFERRED'}); +CREATE (:Segment {`id`: 'all'}); -// Create Segment -CREATE (node:Segment {id: 'all'}); +CREATE (:Offer {`id`: 'default_offer'}); -// Create Offer -CREATE (node:Offer {id: 'default_offer'}); +MATCH (n:Offer {id: 'default_offer'}) +WITH n +MATCH (m:Product {id: '1GB_249NOK'}) +CREATE (n)-[:OFFER_HAS_PRODUCT]->(m); -// Add Segment to Offer -MATCH (to:Segment) - WHERE to.id IN ['all'] -WITH to -MATCH (from:Offer {id: 'default_offer'}) -CREATE (from)-[:offerHasSegment]->(to); +MATCH (n:Offer {id: 'default_offer'}) +WITH n +MATCH (m:Product {id: '2GB_299NOK'}) +CREATE (n)-[:OFFER_HAS_PRODUCT]->(m); -// Add Product to Offer -MATCH (to:Product) - WHERE to.id IN ['1GB_249NOK', '2GB_299NOK', '3GB_349NOK', '5GB_399NOK'] -WITH to -MATCH (from:Offer {id: 'default_offer'}) -CREATE (from)-[:offerHasProduct]->(to); \ No newline at end of file +MATCH (n:Offer {id: 'default_offer'}) +WITH n +MATCH (m:Product {id: '3GB_349NOK'}) +CREATE (n)-[:OFFER_HAS_PRODUCT]->(m); + +MATCH (n:Offer {id: 'default_offer'}) +WITH n +MATCH (m:Product {id: '5GB_399NOK'}) +CREATE (n)-[:OFFER_HAS_PRODUCT]->(m); + +MATCH (n:Offer {id: 'default_offer'}) +WITH n +MATCH (m:Segment {id: 'all'}) +CREATE (n)-[:OFFERED_TO_SEGMENT]->(m); \ No newline at end of file
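
Note on the rewritten init.cypher above: the relationship types change from offerHasProduct/offerHasSegment to OFFER_HAS_PRODUCT/OFFERED_TO_SEGMENT, and each product is now linked to the default offer with an explicit MATCH/CREATE pair instead of a single `WHERE to.id IN [...]` match. A minimal Cypher sketch of how the resulting graph can be read back — the query itself is illustrative and not part of this change; the labels, relationship types, and property names are taken from the statements above:

    // List the products offered to the 'all' segment through the default offer.
    // Illustrative only; assumes the nodes and relationships created by init.cypher above.
    MATCH (:Segment {id: 'all'})<-[:OFFERED_TO_SEGMENT]-(o:Offer {id: 'default_offer'}),
          (o)-[:OFFER_HAS_PRODUCT]->(p:Product)
    RETURN p.sku AS sku, p.`price/amount` AS amount, p.`price/currency` AS currency
    ORDER BY sku;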