From 9478a869a3c93350b308f8a2da9926f7cdd9a968 Mon Sep 17 00:00:00 2001 From: havard Date: Mon, 27 Aug 2018 22:00:38 +0530 Subject: [PATCH 01/78] feat(monitoring): add initial deployment spec --- prime/infra/README.md | 35 +++ prime/infra/dev/monitoring-cluster-role.yaml | 33 +++ prime/infra/dev/monitoring-pushgateway.yaml | 30 +++ prime/infra/dev/monitoring.yaml | 255 +++++++++++++++++++ 4 files changed, 353 insertions(+) create mode 100644 prime/infra/dev/monitoring-cluster-role.yaml create mode 100644 prime/infra/dev/monitoring-pushgateway.yaml create mode 100644 prime/infra/dev/monitoring.yaml diff --git a/prime/infra/README.md b/prime/infra/README.md index eb6c136ff..e92b56680 100644 --- a/prime/infra/README.md +++ b/prime/infra/README.md @@ -239,6 +239,41 @@ gcloud endpoints services deploy prime/infra/dev/prime-client-api.yaml ## Deploy to Dev cluster +### Deploy monitoring +```bash +# Create namespace if it does not already exist +kubectl create namespace monitoring + +# Not sure what this does, but should probably have a description +kubectl create clusterrolebinding cluster-admin-binding --clusterrole cluster-admin --user $(gcloud config get-value account) + +kubectl create -f prime/infra/dev/monitoring-cluster-role.yaml + +# config map, prometheus, grafana +kubectl create -f prime/infra/dev/monitoring.yaml --namespace=monitoring + +kubectl create -f prime/infra/dev/monitoring-pushgateway.yaml +``` + +#### Prometheus dashboard +```bash +kubectl port-forward --namespace=monitoring $(kubectl get pods --namespace=monitoring | grep prometheus-deployment | awk '{print $1}') 9090 +``` + +#### Grafana dashboard +__`Has own its own load balancer and can be accessed directly. 
Discuss if this is OK or find and implement a different way of accessing the grafana dashboard.`__ + +Can be accessed directly from external ip +```bash +kubectl get services --namespace=monitoring | grep grafana | awk '{print $4}' +``` + +#### Push gateway +```bash +# Push a metric to pushgateway:8080 (specified in the service declaration for pushgateway) +echo "some_metric 4.71" | curl -v --data-binary @- http://pushgateway:8080/metrics/job/some_job +``` + ### Setup Neo4j ```bash diff --git a/prime/infra/dev/monitoring-cluster-role.yaml b/prime/infra/dev/monitoring-cluster-role.yaml new file mode 100644 index 000000000..b5caf5667 --- /dev/null +++ b/prime/infra/dev/monitoring-cluster-role.yaml @@ -0,0 +1,33 @@ +apiVersion: rbac.authorization.k8s.io/v1beta1 +kind: ClusterRole +metadata: + name: prometheus +rules: +- apiGroups: [""] + resources: + - nodes + - nodes/proxy + - services + - endpoints + - pods + verbs: ["get", "list", "watch"] +- apiGroups: + - extensions + resources: + - ingresses + verbs: ["get", "list", "watch"] +- nonResourceURLs: ["/metrics"] + verbs: ["get"] +--- +apiVersion: rbac.authorization.k8s.io/v1beta1 +kind: ClusterRoleBinding +metadata: + name: prometheus +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: prometheus +subjects: +- kind: ServiceAccount + name: default + namespace: monitoring diff --git a/prime/infra/dev/monitoring-pushgateway.yaml b/prime/infra/dev/monitoring-pushgateway.yaml new file mode 100644 index 000000000..4448c85a2 --- /dev/null +++ b/prime/infra/dev/monitoring-pushgateway.yaml @@ -0,0 +1,30 @@ +apiVersion: extensions/v1beta1 +kind: Deployment +metadata: + name: pushgateway-deployment +spec: + replicas: 1 + template: + metadata: + labels: + app: pushgateway-server + annotations: + prometheus.io/scrape: 'true' + spec: + containers: + - name: pushgateway + image: prom/pushgateway:v0.5.2 + ports: + - containerPort: 9091 +--- +apiVersion: v1 +kind: Service +metadata: + name: pushgateway +spec: 
+ selector: + app: pushgateway-server + type: NodePort + ports: + - port: 8080 + targetPort: 9091 diff --git a/prime/infra/dev/monitoring.yaml b/prime/infra/dev/monitoring.yaml new file mode 100644 index 000000000..4dd35f6b5 --- /dev/null +++ b/prime/infra/dev/monitoring.yaml @@ -0,0 +1,255 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: prometheus-server-conf + labels: + name: prometheus-server-conf + namespace: monitoring +data: + prometheus.yml: |- + global: + scrape_interval: 5s + evaluation_interval: 5s + + scrape_configs: + - job_name: 'kubernetes-apiservers' + + kubernetes_sd_configs: + - role: endpoints + scheme: https + + tls_config: + ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt + bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token + + relabel_configs: + - source_labels: [__meta_kubernetes_namespace, __meta_kubernetes_service_name, __meta_kubernetes_endpoint_port_name] + action: keep + regex: default;kubernetes;https + + - job_name: 'kubernetes-nodes' + + scheme: https + + tls_config: + ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt + bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token + + kubernetes_sd_configs: + - role: node + + relabel_configs: + - action: labelmap + regex: __meta_kubernetes_node_label_(.+) + - target_label: __address__ + replacement: kubernetes.default.svc:443 + - source_labels: [__meta_kubernetes_node_name] + regex: (.+) + target_label: __metrics_path__ + replacement: /api/v1/nodes/${1}/proxy/metrics + + + - job_name: 'kubernetes-pods' + + kubernetes_sd_configs: + - role: pod + + relabel_configs: + - source_labels: [__meta_kubernetes_pod_annotation_prometheus_io_scrape] + action: keep + regex: true + - source_labels: [__meta_kubernetes_pod_annotation_prometheus_io_path] + action: replace + target_label: __metrics_path__ + regex: (.+) + - source_labels: [__address__, __meta_kubernetes_pod_annotation_prometheus_io_port] + action: replace + regex: 
([^:]+)(?::\d+)?;(\d+) + replacement: $1:$2 + target_label: __address__ + - action: labelmap + regex: __meta_kubernetes_pod_label_(.+) + - source_labels: [__meta_kubernetes_namespace] + action: replace + target_label: kubernetes_namespace + - source_labels: [__meta_kubernetes_pod_name] + action: replace + target_label: kubernetes_pod_name + + - job_name: 'kubernetes-cadvisor' + + scheme: https + + tls_config: + ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt + bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token + + kubernetes_sd_configs: + - role: node + + relabel_configs: + - action: labelmap + regex: __meta_kubernetes_node_label_(.+) + - target_label: __address__ + replacement: kubernetes.default.svc:443 + - source_labels: [__meta_kubernetes_node_name] + regex: (.+) + target_label: __metrics_path__ + replacement: /api/v1/nodes/${1}/proxy/metrics/cadvisor + + - job_name: 'kubernetes-service-endpoints' + + kubernetes_sd_configs: + - role: endpoints + + relabel_configs: + - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_scrape] + action: keep + regex: true + - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_scheme] + action: replace + target_label: __scheme__ + regex: (https?) 
+ - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_path] + action: replace + target_label: __metrics_path__ + regex: (.+) + - source_labels: [__address__, __meta_kubernetes_service_annotation_prometheus_io_port] + action: replace + target_label: __address__ + regex: ([^:]+)(?::\d+)?;(\d+) + replacement: $1:$2 + - action: labelmap + regex: __meta_kubernetes_service_label_(.+) + - source_labels: [__meta_kubernetes_namespace] + action: replace + target_label: kubernetes_namespace + - source_labels: [__meta_kubernetes_service_name] + action: replace + target_label: kubernetes_name +--- +apiVersion: extensions/v1beta1 +kind: Deployment +metadata: + name: prometheus-deployment + namespace: monitoring +spec: + replicas: 1 + template: + metadata: + labels: + app: prometheus-server + spec: + containers: + - name: prometheus + image: prom/prometheus:v2.3.2 + args: + - "--config.file=/etc/prometheus/prometheus.yml" + - "--storage.tsdb.path=/prometheus/" + ports: + - containerPort: 9090 + volumeMounts: + - name: prometheus-config-volume + mountPath: /etc/prometheus/ + - name: prometheus-storage-volume + mountPath: /prometheus/ + volumes: + - name: prometheus-config-volume + configMap: + defaultMode: 420 + name: prometheus-server-conf + + - name: prometheus-storage-volume + emptyDir: {} +--- +apiVersion: v1 +kind: Service +metadata: + name: prometheus + namespace: monitoring + annotations: + prometheus.io/scrape: 'true' + prometheus.io/path: / + prometheus.io/port: '8080' + +spec: + selector: + app: prometheus-server + type: NodePort + ports: + - port: 8080 + targetPort: 9090 + nodePort: 30000 +--- +apiVersion: extensions/v1beta1 +kind: Deployment +metadata: + name: grafana-deployment + namespace: monitoring + labels: + app: grafana +spec: + replicas: 1 + template: + metadata: + labels: + app: grafana + spec: + containers: + - image: grafana/grafana:4.2.0 + name: grafana + imagePullPolicy: IfNotPresent + ports: + - name: http-server + containerPort: 3000 + # 
env: + resources: + # keep request = limit to keep this container in guaranteed class + limits: + cpu: 100m + memory: 100Mi + requests: + cpu: 100m + memory: 100Mi + env: + # The following env variables set up basic auth twith the default admin user and admin password. + - name: GF_AUTH_BASIC_ENABLED + value: "true" + - name: GF_AUTH_ANONYMOUS_ENABLED + value: "false" + # - name: GF_AUTH_ANONYMOUS_ORG_ROLE + # value: Admin + # does not really work, because of template variables in exported dashboards: + # - name: GF_DASHBOARDS_JSON_ENABLED + # value: "true" + readinessProbe: + httpGet: + path: /login + port: 3000 + # initialDelaySeconds: 30 + # timeoutSeconds: 1 + volumeMounts: + - name: grafana-persistent-storage + mountPath: /var + volumes: + - name: grafana-persistent-storage + emptyDir: {} +--- +apiVersion: v1 +kind: Service +metadata: + name: grafana + namespace: monitoring + labels: + app: grafana +spec: + type: LoadBalancer + ports: + - port: 80 + targetPort: http-server + # spec: + # type: NodePort + # ports: + # - port: 3000 + selector: + app: grafana From 290ec3c975eafdc5552b3bd205ac0441c6769765 Mon Sep 17 00:00:00 2001 From: havard Date: Tue, 28 Aug 2018 00:05:13 +0530 Subject: [PATCH 02/78] refactor(monitoring): base deployment on https://github.com/giantswarm/kubernetes-prometheus --- prime/infra/README.md | 19 +- prime/infra/dev/monitoring-cluster-role.yaml | 33 - prime/infra/dev/monitoring.yaml | 2896 +++++++++++++++++- 3 files changed, 2753 insertions(+), 195 deletions(-) delete mode 100644 prime/infra/dev/monitoring-cluster-role.yaml diff --git a/prime/infra/README.md b/prime/infra/README.md index e92b56680..bf35840b7 100644 --- a/prime/infra/README.md +++ b/prime/infra/README.md @@ -240,24 +240,18 @@ gcloud endpoints services deploy prime/infra/dev/prime-client-api.yaml ## Deploy to Dev cluster ### Deploy monitoring -```bash -# Create namespace if it does not already exist -kubectl create namespace monitoring - -# Not sure what this does, but 
should probably have a description -kubectl create clusterrolebinding cluster-admin-binding --clusterrole cluster-admin --user $(gcloud config get-value account) +Based on https://github.com/giantswarm/kubernetes-prometheus -kubectl create -f prime/infra/dev/monitoring-cluster-role.yaml - -# config map, prometheus, grafana -kubectl create -f prime/infra/dev/monitoring.yaml --namespace=monitoring +```bash +kubectl apply -f prime/infra/dev/monitoring.yaml -kubectl create -f prime/infra/dev/monitoring-pushgateway.yaml +# +kubectl apply -f prime/infra/dev/monitoring-pushgateway.yaml ``` #### Prometheus dashboard ```bash -kubectl port-forward --namespace=monitoring $(kubectl get pods --namespace=monitoring | grep prometheus-deployment | awk '{print $1}') 9090 +kubectl port-forward --namespace=monitoring $(kubectl get pods --namespace=monitoring | grep prometheus-core | awk '{print $1}') 9090 ``` #### Grafana dashboard @@ -271,6 +265,7 @@ kubectl get services --namespace=monitoring | grep grafana | awk '{print $4}' #### Push gateway ```bash # Push a metric to pushgateway:8080 (specified in the service declaration for pushgateway) +kubectl run curl-it --image=radial/busyboxplus:curl -i --tty --rm echo "some_metric 4.71" | curl -v --data-binary @- http://pushgateway:8080/metrics/job/some_job ``` diff --git a/prime/infra/dev/monitoring-cluster-role.yaml b/prime/infra/dev/monitoring-cluster-role.yaml deleted file mode 100644 index b5caf5667..000000000 --- a/prime/infra/dev/monitoring-cluster-role.yaml +++ /dev/null @@ -1,33 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1beta1 -kind: ClusterRole -metadata: - name: prometheus -rules: -- apiGroups: [""] - resources: - - nodes - - nodes/proxy - - services - - endpoints - - pods - verbs: ["get", "list", "watch"] -- apiGroups: - - extensions - resources: - - ingresses - verbs: ["get", "list", "watch"] -- nonResourceURLs: ["/metrics"] - verbs: ["get"] ---- -apiVersion: rbac.authorization.k8s.io/v1beta1 -kind: ClusterRoleBinding 
-metadata: - name: prometheus -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: ClusterRole - name: prometheus -subjects: -- kind: ServiceAccount - name: default - namespace: monitoring diff --git a/prime/infra/dev/monitoring.yaml b/prime/infra/dev/monitoring.yaml index 4dd35f6b5..1aef71c0b 100644 --- a/prime/infra/dev/monitoring.yaml +++ b/prime/infra/dev/monitoring.yaml @@ -1,207 +1,384 @@ +# Derived from ./manifests +--- +apiVersion: v1 +kind: Namespace +metadata: + name: monitoring +--- apiVersion: v1 +data: + default.tmpl: | + {{ define "__alertmanager" }}AlertManager{{ end }} + {{ define "__alertmanagerURL" }}{{ .ExternalURL }}/#/alerts?receiver={{ .Receiver }}{{ end }} + + {{ define "__subject" }}[{{ .Status | toUpper }}{{ if eq .Status "firing" }}:{{ .Alerts.Firing | len }}{{ end }}] {{ .GroupLabels.SortedPairs.Values | join " " }} {{ if gt (len .CommonLabels) (len .GroupLabels) }}({{ with .CommonLabels.Remove .GroupLabels.Names }}{{ .Values | join " " }}{{ end }}){{ end }}{{ end }} + {{ define "__description" }}{{ end }} + + {{ define "__text_alert_list" }}{{ range . }}Labels: + {{ range .Labels.SortedPairs }} - {{ .Name }} = {{ .Value }} + {{ end }}Annotations: + {{ range .Annotations.SortedPairs }} - {{ .Name }} = {{ .Value }} + {{ end }}Source: {{ .GeneratorURL }} + {{ end }}{{ end }} + + + {{ define "slack.default.title" }}{{ template "__subject" . }}{{ end }} + {{ define "slack.default.username" }}{{ template "__alertmanager" . }}{{ end }} + {{ define "slack.default.fallback" }}{{ template "slack.default.title" . }} | {{ template "slack.default.titlelink" . }}{{ end }} + {{ define "slack.default.pretext" }}{{ end }} + {{ define "slack.default.titlelink" }}{{ template "__alertmanagerURL" . }}{{ end }} + {{ define "slack.default.iconemoji" }}{{ end }} + {{ define "slack.default.iconurl" }}{{ end }} + {{ define "slack.default.text" }}{{ end }} + + + {{ define "hipchat.default.from" }}{{ template "__alertmanager" . 
}}{{ end }} + {{ define "hipchat.default.message" }}{{ template "__subject" . }}{{ end }} + + + {{ define "pagerduty.default.description" }}{{ template "__subject" . }}{{ end }} + {{ define "pagerduty.default.client" }}{{ template "__alertmanager" . }}{{ end }} + {{ define "pagerduty.default.clientURL" }}{{ template "__alertmanagerURL" . }}{{ end }} + {{ define "pagerduty.default.instances" }}{{ template "__text_alert_list" . }}{{ end }} + + + {{ define "opsgenie.default.message" }}{{ template "__subject" . }}{{ end }} + {{ define "opsgenie.default.description" }}{{ .CommonAnnotations.SortedPairs.Values | join " " }} + {{ if gt (len .Alerts.Firing) 0 -}} + Alerts Firing: + {{ template "__text_alert_list" .Alerts.Firing }} + {{- end }} + {{ if gt (len .Alerts.Resolved) 0 -}} + Alerts Resolved: + {{ template "__text_alert_list" .Alerts.Resolved }} + {{- end }} + {{- end }} + {{ define "opsgenie.default.source" }}{{ template "__alertmanagerURL" . }}{{ end }} + + + {{ define "victorops.default.message" }}{{ template "__subject" . }} | {{ template "__alertmanagerURL" . }}{{ end }} + {{ define "victorops.default.from" }}{{ template "__alertmanager" . }}{{ end }} + + + {{ define "email.default.subject" }}{{ template "__subject" . }}{{ end }} + {{ define "email.default.html" }} + + + + + + + {{ template "__subject" . }} + + + + + + + + + + + +
+
+ + + + + + + +
+ {{ .Alerts | len }} alert{{ if gt (len .Alerts) 1 }}s{{ end }} for {{ range .GroupLabels.SortedPairs }} + {{ .Name }}={{ .Value }} + {{ end }} +
+ + + + + {{ if gt (len .Alerts.Firing) 0 }} + + + + {{ end }} + {{ range .Alerts.Firing }} + + + + {{ end }} + + {{ if gt (len .Alerts.Resolved) 0 }} + {{ if gt (len .Alerts.Firing) 0 }} + + + + {{ end }} + + + + {{ end }} + {{ range .Alerts.Resolved }} + + + + {{ end }} +
+ View in {{ template "__alertmanager" . }} +
+ [{{ .Alerts.Firing | len }}] Firing +
+ Labels
+ {{ range .Labels.SortedPairs }}{{ .Name }} = {{ .Value }}
{{ end }} + {{ if gt (len .Annotations) 0 }}Annotations
{{ end }} + {{ range .Annotations.SortedPairs }}{{ .Name }} = {{ .Value }}
{{ end }} + Source
+
+
+
+
+
+ [{{ .Alerts.Resolved | len }}] Resolved +
+ Labels
+ {{ range .Labels.SortedPairs }}{{ .Name }} = {{ .Value }}
{{ end }} + {{ if gt (len .Annotations) 0 }}Annotations
{{ end }} + {{ range .Annotations.SortedPairs }}{{ .Name }} = {{ .Value }}
{{ end }} + Source
+
+
+ +
+
+ + + + + {{ end }} + + {{ define "pushover.default.title" }}{{ template "__subject" . }}{{ end }} + {{ define "pushover.default.message" }}{{ .CommonAnnotations.SortedPairs.Values | join " " }} + {{ if gt (len .Alerts.Firing) 0 }} + Alerts Firing: + {{ template "__text_alert_list" .Alerts.Firing }} + {{ end }} + {{ if gt (len .Alerts.Resolved) 0 }} + Alerts Resolved: + {{ template "__text_alert_list" .Alerts.Resolved }} + {{ end }} + {{ end }} + {{ define "pushover.default.url" }}{{ template "__alertmanagerURL" . }}{{ end }} + slack.tmpl: | + {{ define "slack.devops.text" }} + {{range .Alerts}}{{.Annotations.DESCRIPTION}} + {{end}} + {{ end }} kind: ConfigMap metadata: - name: prometheus-server-conf - labels: - name: prometheus-server-conf + creationTimestamp: null + name: alertmanager-templates + namespace: monitoring +--- +kind: ConfigMap +apiVersion: v1 +metadata: + name: alertmanager namespace: monitoring data: - prometheus.yml: |- + config.yml: |- global: - scrape_interval: 5s - evaluation_interval: 5s + # ResolveTimeout is the time after which an alert is declared resolved + # if it has not been updated. + resolve_timeout: 5m - scrape_configs: - - job_name: 'kubernetes-apiservers' + # The smarthost and SMTP sender used for mail notifications. + smtp_smarthost: 'smtp.gmail.com:587' + smtp_from: 'foo@bar.com' + smtp_auth_username: 'foo@bar.com' + smtp_auth_password: 'barfoo' - kubernetes_sd_configs: - - role: endpoints - scheme: https + # The API URL to use for Slack notifications. + slack_api_url: 'https://hooks.slack.com/services/some/api/token' - tls_config: - ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt - bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token - - relabel_configs: - - source_labels: [__meta_kubernetes_namespace, __meta_kubernetes_service_name, __meta_kubernetes_endpoint_port_name] - action: keep - regex: default;kubernetes;https + # # The directory from which notification templates are read. 
+ templates: + - '/etc/alertmanager-templates/*.tmpl' - - job_name: 'kubernetes-nodes' + # The root route on which each incoming alert enters. + route: - scheme: https + # The labels by which incoming alerts are grouped together. For example, + # multiple alerts coming in for cluster=A and alertname=LatencyHigh would + # be batched into a single group. - tls_config: - ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt - bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token + group_by: ['alertname', 'cluster', 'service'] - kubernetes_sd_configs: - - role: node + # When a new group of alerts is created by an incoming alert, wait at + # least 'group_wait' to send the initial notification. + # This way ensures that you get multiple alerts for the same group that start + # firing shortly after another are batched together on the first + # notification. - relabel_configs: - - action: labelmap - regex: __meta_kubernetes_node_label_(.+) - - target_label: __address__ - replacement: kubernetes.default.svc:443 - - source_labels: [__meta_kubernetes_node_name] - regex: (.+) - target_label: __metrics_path__ - replacement: /api/v1/nodes/${1}/proxy/metrics + group_wait: 30s + # When the first notification was sent, wait 'group_interval' to send a batch + # of new alerts that started firing for that group. - - job_name: 'kubernetes-pods' + group_interval: 5m - kubernetes_sd_configs: - - role: pod + # If an alert has successfully been sent, wait 'repeat_interval' to + # resend them. 
- relabel_configs: - - source_labels: [__meta_kubernetes_pod_annotation_prometheus_io_scrape] - action: keep - regex: true - - source_labels: [__meta_kubernetes_pod_annotation_prometheus_io_path] - action: replace - target_label: __metrics_path__ - regex: (.+) - - source_labels: [__address__, __meta_kubernetes_pod_annotation_prometheus_io_port] - action: replace - regex: ([^:]+)(?::\d+)?;(\d+) - replacement: $1:$2 - target_label: __address__ - - action: labelmap - regex: __meta_kubernetes_pod_label_(.+) - - source_labels: [__meta_kubernetes_namespace] - action: replace - target_label: kubernetes_namespace - - source_labels: [__meta_kubernetes_pod_name] - action: replace - target_label: kubernetes_pod_name - - - job_name: 'kubernetes-cadvisor' - - scheme: https + #repeat_interval: 1m + repeat_interval: 15m - tls_config: - ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt - bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token + # A default receiver - kubernetes_sd_configs: - - role: node + # If an alert isn't caught by a route, send it to default. + receiver: default - relabel_configs: - - action: labelmap - regex: __meta_kubernetes_node_label_(.+) - - target_label: __address__ - replacement: kubernetes.default.svc:443 - - source_labels: [__meta_kubernetes_node_name] - regex: (.+) - target_label: __metrics_path__ - replacement: /api/v1/nodes/${1}/proxy/metrics/cadvisor + # All the above attributes are inherited by all child routes and can + # overwritten on each. - - job_name: 'kubernetes-service-endpoints' + # The child route trees. + routes: + # Send severity=slack alerts to slack. + - match: + severity: slack + receiver: slack_alert + # - match: + # severity: email + # receiver: email_alert - kubernetes_sd_configs: - - role: endpoints + receivers: + - name: 'default' + slack_configs: + - channel: '#alertmanager-test' + text: '{{ template "slack.devops.text" . 
}}' + send_resolved: true - relabel_configs: - - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_scrape] - action: keep - regex: true - - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_scheme] - action: replace - target_label: __scheme__ - regex: (https?) - - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_path] - action: replace - target_label: __metrics_path__ - regex: (.+) - - source_labels: [__address__, __meta_kubernetes_service_annotation_prometheus_io_port] - action: replace - target_label: __address__ - regex: ([^:]+)(?::\d+)?;(\d+) - replacement: $1:$2 - - action: labelmap - regex: __meta_kubernetes_service_label_(.+) - - source_labels: [__meta_kubernetes_namespace] - action: replace - target_label: kubernetes_namespace - - source_labels: [__meta_kubernetes_service_name] - action: replace - target_label: kubernetes_name + - name: 'slack_alert' + slack_configs: + - channel: '#alertmanager-test' + send_resolved: true --- apiVersion: extensions/v1beta1 kind: Deployment metadata: - name: prometheus-deployment + name: alertmanager namespace: monitoring spec: replicas: 1 + selector: + matchLabels: + app: alertmanager template: metadata: + name: alertmanager labels: - app: prometheus-server + app: alertmanager spec: containers: - - name: prometheus - image: prom/prometheus:v2.3.2 + - name: alertmanager + image: quay.io/prometheus/alertmanager:v0.7.1 args: - - "--config.file=/etc/prometheus/prometheus.yml" - - "--storage.tsdb.path=/prometheus/" + - '-config.file=/etc/alertmanager/config.yml' + - '-storage.path=/alertmanager' ports: - - containerPort: 9090 + - name: alertmanager + containerPort: 9093 volumeMounts: - - name: prometheus-config-volume - mountPath: /etc/prometheus/ - - name: prometheus-storage-volume - mountPath: /prometheus/ + - name: config-volume + mountPath: /etc/alertmanager + - name: templates-volume + mountPath: /etc/alertmanager-templates + - name: alertmanager + mountPath: /alertmanager 
volumes: - - name: prometheus-config-volume + - name: config-volume configMap: - defaultMode: 420 - name: prometheus-server-conf - - - name: prometheus-storage-volume + name: alertmanager + - name: templates-volume + configMap: + name: alertmanager-templates + - name: alertmanager emptyDir: {} --- apiVersion: v1 kind: Service metadata: - name: prometheus - namespace: monitoring annotations: prometheus.io/scrape: 'true' - prometheus.io/path: / - prometheus.io/port: '8080' - + prometheus.io/path: '/metrics' + labels: + name: alertmanager + name: alertmanager + namespace: monitoring spec: selector: - app: prometheus-server + app: alertmanager type: NodePort ports: - - port: 8080 - targetPort: 9090 - nodePort: 30000 + - name: alertmanager + protocol: TCP + port: 9093 + targetPort: 9093 --- apiVersion: extensions/v1beta1 kind: Deployment metadata: - name: grafana-deployment + name: grafana-core namespace: monitoring labels: app: grafana + component: core spec: replicas: 1 template: metadata: labels: app: grafana + component: core spec: containers: - image: grafana/grafana:4.2.0 - name: grafana + name: grafana-core imagePullPolicy: IfNotPresent ports: - - name: http-server - containerPort: 3000 + - name: http-server + containerPort: 3000 # env: resources: # keep request = limit to keep this container in guaranteed class @@ -212,16 +389,16 @@ spec: cpu: 100m memory: 100Mi env: - # The following env variables set up basic auth twith the default admin user and admin password. - - name: GF_AUTH_BASIC_ENABLED - value: "true" - - name: GF_AUTH_ANONYMOUS_ENABLED - value: "false" - # - name: GF_AUTH_ANONYMOUS_ORG_ROLE - # value: Admin - # does not really work, because of template variables in exported dashboards: - # - name: GF_DASHBOARDS_JSON_ENABLED - # value: "true" + # The following env variables set up basic auth twith the default admin user and admin password. 
+ - name: GF_AUTH_BASIC_ENABLED + value: "true" + - name: GF_AUTH_ANONYMOUS_ENABLED + value: "false" + # - name: GF_AUTH_ANONYMOUS_ORG_ROLE + # value: Admin + # does not really work, because of template variables in exported dashboards: + # - name: GF_DASHBOARDS_JSON_ENABLED + # value: "true" readinessProbe: httpGet: path: /login @@ -236,20 +413,2439 @@ spec: emptyDir: {} --- apiVersion: v1 +data: + grafana-net-2-dashboard.json: | + { + "__inputs": [{ + "name": "DS_PROMETHEUS", + "label": "Prometheus", + "description": "", + "type": "datasource", + "pluginId": "prometheus", + "pluginName": "Prometheus" + }], + "__requires": [{ + "type": "panel", + "id": "singlestat", + "name": "Singlestat", + "version": "" + }, { + "type": "panel", + "id": "text", + "name": "Text", + "version": "" + }, { + "type": "panel", + "id": "graph", + "name": "Graph", + "version": "" + }, { + "type": "grafana", + "id": "grafana", + "name": "Grafana", + "version": "3.1.0" + }, { + "type": "datasource", + "id": "prometheus", + "name": "Prometheus", + "version": "1.0.0" + }], + "id": null, + "title": "Prometheus Stats", + "tags": [], + "style": "dark", + "timezone": "browser", + "editable": true, + "hideControls": true, + "sharedCrosshair": false, + "rows": [{ + "collapse": false, + "editable": true, + "height": 178, + "panels": [{ + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": ["rgba(245, 54, 54, 0.9)", "rgba(237, 129, 40, 0.89)", "rgba(50, 172, 45, 0.97)"], + "datasource": "${DS_PROMETHEUS}", + "decimals": 1, + "editable": true, + "error": false, + "format": "s", + "id": 5, + "interval": null, + "links": [], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "span": 3, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [{ + "expr": "(time() - 
process_start_time_seconds{job=\"prometheus\"})", + "intervalFactor": 2, + "refId": "A", + "step": 4 + }], + "thresholds": "", + "title": "Uptime", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [{ + "op": "=", + "text": "N/A", + "value": "null" + }], + "valueName": "current", + "mappingTypes": [{ + "name": "value to text", + "value": 1 + }, { + "name": "range to text", + "value": 2 + }], + "rangeMaps": [{ + "from": "null", + "to": "null", + "text": "N/A" + }], + "mappingType": 1, + "gauge": { + "show": false, + "minValue": 0, + "maxValue": 100, + "thresholdMarkers": true, + "thresholdLabels": false + } + }, { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": ["rgba(50, 172, 45, 0.97)", "rgba(237, 129, 40, 0.89)", "rgba(245, 54, 54, 0.9)"], + "datasource": "${DS_PROMETHEUS}", + "editable": true, + "error": false, + "format": "none", + "id": 6, + "interval": null, + "links": [], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "span": 3, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": true + }, + "targets": [{ + "expr": "prometheus_local_storage_memory_series", + "intervalFactor": 2, + "refId": "A", + "step": 4 + }], + "thresholds": "1,5", + "title": "Local Storage Memory Series", + "type": "singlestat", + "valueFontSize": "70%", + "valueMaps": [], + "valueName": "current", + "mappingTypes": [{ + "name": "value to text", + "value": 1 + }, { + "name": "range to text", + "value": 2 + }], + "rangeMaps": [{ + "from": "null", + "to": "null", + "text": "N/A" + }], + "mappingType": 1, + "gauge": { + "show": false, + "minValue": 0, + "maxValue": 100, + "thresholdMarkers": true, + "thresholdLabels": false + } + }, { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": true, + "colors": ["rgba(50, 172, 45, 0.97)", 
"rgba(237, 129, 40, 0.89)", "rgba(245, 54, 54, 0.9)"], + "datasource": "${DS_PROMETHEUS}", + "editable": true, + "error": false, + "format": "none", + "id": 7, + "interval": null, + "links": [], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "span": 3, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": true + }, + "targets": [{ + "expr": "prometheus_local_storage_indexing_queue_length", + "intervalFactor": 2, + "refId": "A", + "step": 4 + }], + "thresholds": "500,4000", + "title": "Interal Storage Queue Length", + "type": "singlestat", + "valueFontSize": "70%", + "valueMaps": [{ + "op": "=", + "text": "Empty", + "value": "0" + }], + "valueName": "current", + "mappingTypes": [{ + "name": "value to text", + "value": 1 + }, { + "name": "range to text", + "value": 2 + }], + "rangeMaps": [{ + "from": "null", + "to": "null", + "text": "N/A" + }], + "mappingType": 1, + "gauge": { + "show": false, + "minValue": 0, + "maxValue": 100, + "thresholdMarkers": true, + "thresholdLabels": false + } + }, { + "content": "\"Prometheus\nPrometheus\n\n

You're using Prometheus, an open-source systems monitoring and alerting toolkit originally built at SoundCloud. For more information, check out the Grafana and Prometheus projects.

", + "editable": true, + "error": false, + "id": 9, + "links": [], + "mode": "html", + "span": 3, + "style": {}, + "title": "", + "transparent": true, + "type": "text" + }], + "title": "New row" + }, { + "collapse": false, + "editable": true, + "height": 227, + "panels": [{ + "aliasColors": { + "prometheus": "#C15C17", + "{instance=\"localhost:9090\",job=\"prometheus\"}": "#C15C17" + }, + "bars": false, + "datasource": "${DS_PROMETHEUS}", + "editable": true, + "error": false, + "fill": 1, + "grid": { + "threshold1": null, + "threshold1Color": "rgba(216, 200, 27, 0.27)", + "threshold2": null, + "threshold2Color": "rgba(234, 112, 112, 0.22)" + }, + "id": 3, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 9, + "stack": false, + "steppedLine": false, + "targets": [{ + "expr": "rate(prometheus_local_storage_ingested_samples_total[5m])", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{job}}", + "metric": "", + "refId": "A", + "step": 2 + }], + "timeFrom": null, + "timeShift": null, + "title": "Samples ingested (rate-5m)", + "tooltip": { + "shared": true, + "value_type": "cumulative", + "ordering": "alphabetical", + "msResolution": false + }, + "type": "graph", + "yaxes": [{ + "show": true, + "min": null, + "max": null, + "logBase": 1, + "format": "short" + }, { + "show": true, + "min": null, + "max": null, + "logBase": 1, + "format": "short" + }], + "xaxis": { + "show": true + } + }, { + "content": "#### Samples Ingested\nThis graph displays the count of samples ingested by the Prometheus server, as measured over the last 5 minutes, per time series in the range vector. 
When troubleshooting an issue on IRC or Github, this is often the first stat requested by the Prometheus team. ", + "editable": true, + "error": false, + "id": 8, + "links": [], + "mode": "markdown", + "span": 2.995914043583536, + "style": {}, + "title": "", + "transparent": true, + "type": "text" + }], + "title": "New row" + }, { + "collapse": false, + "editable": true, + "height": "250px", + "panels": [{ + "aliasColors": { + "prometheus": "#F9BA8F", + "{instance=\"localhost:9090\",interval=\"5s\",job=\"prometheus\"}": "#F9BA8F" + }, + "bars": false, + "datasource": "${DS_PROMETHEUS}", + "editable": true, + "error": false, + "fill": 1, + "grid": { + "threshold1": null, + "threshold1Color": "rgba(216, 200, 27, 0.27)", + "threshold2": null, + "threshold2Color": "rgba(234, 112, 112, 0.22)" + }, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 5, + "stack": false, + "steppedLine": false, + "targets": [{ + "expr": "rate(prometheus_target_interval_length_seconds_count[5m])", + "intervalFactor": 2, + "legendFormat": "{{job}}", + "refId": "A", + "step": 2 + }], + "timeFrom": null, + "timeShift": null, + "title": "Target Scrapes (last 5m)", + "tooltip": { + "shared": true, + "value_type": "cumulative", + "ordering": "alphabetical", + "msResolution": false + }, + "type": "graph", + "yaxes": [{ + "show": true, + "min": null, + "max": null, + "logBase": 1, + "format": "short" + }, { + "show": true, + "min": null, + "max": null, + "logBase": 1, + "format": "short" + }], + "xaxis": { + "show": true + } + }, { + "aliasColors": {}, + "bars": false, + "datasource": "${DS_PROMETHEUS}", + "editable": true, + "error": false, + "fill": 1, + "grid": { + "threshold1": null, + 
"threshold1Color": "rgba(216, 200, 27, 0.27)", + "threshold2": null, + "threshold2Color": "rgba(234, 112, 112, 0.22)" + }, + "id": 14, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 4, + "stack": false, + "steppedLine": false, + "targets": [{ + "expr": "prometheus_target_interval_length_seconds{quantile!=\"0.01\", quantile!=\"0.05\"}", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{quantile}} ({{interval}})", + "metric": "", + "refId": "A", + "step": 2 + }], + "timeFrom": null, + "timeShift": null, + "title": "Scrape Duration", + "tooltip": { + "shared": true, + "value_type": "cumulative", + "ordering": "alphabetical", + "msResolution": false + }, + "type": "graph", + "yaxes": [{ + "show": true, + "min": null, + "max": null, + "logBase": 1, + "format": "short" + }, { + "show": true, + "min": null, + "max": null, + "logBase": 1, + "format": "short" + }], + "xaxis": { + "show": true + } + }, { + "content": "#### Scrapes\nPrometheus scrapes metrics from instrumented jobs, either directly or via an intermediary push gateway for short-lived jobs. Target scrapes will show how frequently targets are scraped, as measured over the last 5 minutes, per time series in the range vector. Scrape Duration will show how long the scrapes are taking, with percentiles available as series. 
", + "editable": true, + "error": false, + "id": 11, + "links": [], + "mode": "markdown", + "span": 3, + "style": {}, + "title": "", + "transparent": true, + "type": "text" + }], + "title": "New row" + }, { + "collapse": false, + "editable": true, + "height": "250px", + "panels": [{ + "aliasColors": {}, + "bars": false, + "datasource": "${DS_PROMETHEUS}", + "decimals": null, + "editable": true, + "error": false, + "fill": 1, + "grid": { + "threshold1": null, + "threshold1Color": "rgba(216, 200, 27, 0.27)", + "threshold2": null, + "threshold2Color": "rgba(234, 112, 112, 0.22)" + }, + "id": 12, + "legend": { + "alignAsTable": false, + "avg": false, + "current": false, + "hideEmpty": true, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 9, + "stack": false, + "steppedLine": false, + "targets": [{ + "expr": "prometheus_evaluator_duration_milliseconds{quantile!=\"0.01\", quantile!=\"0.05\"}", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{quantile}}", + "refId": "A", + "step": 2 + }], + "timeFrom": null, + "timeShift": null, + "title": "Rule Eval Duration", + "tooltip": { + "shared": true, + "value_type": "cumulative", + "ordering": "alphabetical", + "msResolution": false + }, + "type": "graph", + "yaxes": [{ + "show": true, + "min": null, + "max": null, + "logBase": 1, + "format": "percentunit", + "label": "" + }, { + "show": true, + "min": null, + "max": null, + "logBase": 1, + "format": "short" + }], + "xaxis": { + "show": true + } + }, { + "content": "#### Rule Evaluation Duration\nThis graph panel plots the duration for all evaluations to execute. 
The 50th percentile, 90th percentile and 99th percentile are shown as three separate series to help identify outliers that may be skewing the data.", + "editable": true, + "error": false, + "id": 15, + "links": [], + "mode": "markdown", + "span": 3, + "style": {}, + "title": "", + "transparent": true, + "type": "text" + }], + "title": "New row" + }], + "time": { + "from": "now-5m", + "to": "now" + }, + "timepicker": { + "now": true, + "refresh_intervals": ["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"], + "time_options": ["5m", "15m", "1h", "6h", "12h", "24h", "2d", "7d", "30d"] + }, + "templating": { + "list": [] + }, + "annotations": { + "list": [] + }, + "refresh": false, + "schemaVersion": 12, + "version": 0, + "links": [{ + "icon": "info", + "tags": [], + "targetBlank": true, + "title": "Grafana Docs", + "tooltip": "", + "type": "link", + "url": "http://www.grafana.org/docs" + }, { + "icon": "info", + "tags": [], + "targetBlank": true, + "title": "Prometheus Docs", + "type": "link", + "url": "http://prometheus.io/docs/introduction/overview/" + }], + "gnetId": 2, + "description": "The official, pre-built Prometheus Stats Dashboard." 
+ } + grafana-net-737-dashboard.json: | + { + "__inputs": [{ + "name": "DS_PROMETHEUS", + "label": "prometheus", + "description": "", + "type": "datasource", + "pluginId": "prometheus", + "pluginName": "Prometheus" + }], + "__requires": [{ + "type": "panel", + "id": "singlestat", + "name": "Singlestat", + "version": "" + }, { + "type": "panel", + "id": "graph", + "name": "Graph", + "version": "" + }, { + "type": "grafana", + "id": "grafana", + "name": "Grafana", + "version": "3.1.0" + }, { + "type": "datasource", + "id": "prometheus", + "name": "Prometheus", + "version": "1.0.0" + }], + "id": null, + "title": "Kubernetes Pod Resources", + "description": "Shows resource usage of Kubernetes pods.", + "tags": [ + "kubernetes" + ], + "style": "dark", + "timezone": "browser", + "editable": true, + "hideControls": false, + "sharedCrosshair": false, + "rows": [{ + "collapse": false, + "editable": true, + "height": "250px", + "panels": [{ + "cacheTimeout": null, + "colorBackground": false, + "colorValue": true, + "colors": [ + "rgba(50, 172, 45, 0.97)", + "rgba(237, 129, 40, 0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": "${DS_PROMETHEUS}", + "editable": true, + "error": false, + "format": "percent", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": true, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "height": "180px", + "id": 4, + "interval": null, + "isNew": true, + "links": [], + "mappingType": 1, + "mappingTypes": [{ + "name": "value to text", + "value": 1 + }, { + "name": "range to text", + "value": 2 + }], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [{ + "from": "null", + "text": "N/A", + "to": "null" + }], + "span": 4, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [{ + "expr": "sum 
(container_memory_working_set_bytes{id=\"/\",instance=~\"^$instance$\"}) / sum (machine_memory_bytes{instance=~\"^$instance$\"}) * 100", + "interval": "", + "intervalFactor": 2, + "legendFormat": "", + "refId": "A", + "step": 2 + }], + "thresholds": "65, 90", + "timeFrom": "1m", + "timeShift": null, + "title": "Memory Working Set", + "transparent": false, + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [{ + "op": "=", + "text": "N/A", + "value": "null" + }], + "valueName": "current" + }, { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": true, + "colors": [ + "rgba(50, 172, 45, 0.97)", + "rgba(237, 129, 40, 0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": "${DS_PROMETHEUS}", + "decimals": 2, + "editable": true, + "error": false, + "format": "percent", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": true, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "height": "180px", + "id": 6, + "interval": null, + "isNew": true, + "links": [], + "mappingType": 1, + "mappingTypes": [{ + "name": "value to text", + "value": 1 + }, { + "name": "range to text", + "value": 2 + }], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [{ + "from": "null", + "text": "N/A", + "to": "null" + }], + "span": 4, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [{ + "expr": "sum(rate(container_cpu_usage_seconds_total{id=\"/\",instance=~\"^$instance$\"}[1m])) / sum (machine_cpu_cores{instance=~\"^$instance$\"}) * 100", + "interval": "10s", + "intervalFactor": 1, + "refId": "A", + "step": 10 + }], + "thresholds": "65, 90", + "timeFrom": "1m", + "timeShift": null, + "title": "Cpu Usage", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [{ + "op": "=", + "text": "N/A", + "value": "null" + }], + 
"valueName": "current" + }, { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": true, + "colors": [ + "rgba(50, 172, 45, 0.97)", + "rgba(237, 129, 40, 0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": "${DS_PROMETHEUS}", + "decimals": 2, + "editable": true, + "error": false, + "format": "percent", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": true, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "height": "180px", + "id": 7, + "interval": null, + "isNew": true, + "links": [], + "mappingType": 1, + "mappingTypes": [{ + "name": "value to text", + "value": 1 + }, { + "name": "range to text", + "value": 2 + }], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [{ + "from": "null", + "text": "N/A", + "to": "null" + }], + "span": 4, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [{ + "expr": "sum(container_fs_usage_bytes{id=\"/\",instance=~\"^$instance$\"}) / sum(container_fs_limit_bytes{id=\"/\",instance=~\"^$instance$\"}) * 100", + "interval": "10s", + "intervalFactor": 1, + "legendFormat": "", + "metric": "", + "refId": "A", + "step": 10 + }], + "thresholds": "65, 90", + "timeFrom": "1m", + "timeShift": null, + "title": "Filesystem Usage", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [{ + "op": "=", + "text": "N/A", + "value": "null" + }], + "valueName": "current" + }, { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(50, 172, 45, 0.97)", + "rgba(237, 129, 40, 0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": "${DS_PROMETHEUS}", + "decimals": 2, + "editable": true, + "error": false, + "format": "bytes", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": 
true + }, + "height": "1px", + "hideTimeOverride": true, + "id": 9, + "interval": null, + "isNew": true, + "links": [], + "mappingType": 1, + "mappingTypes": [{ + "name": "value to text", + "value": 1 + }, { + "name": "range to text", + "value": 2 + }], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "20%", + "prefix": "", + "prefixFontSize": "20%", + "rangeMaps": [{ + "from": "null", + "text": "N/A", + "to": "null" + }], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [{ + "expr": "sum(container_memory_working_set_bytes{id=\"/\",instance=~\"^$instance$\"})", + "interval": "10s", + "intervalFactor": 1, + "refId": "A", + "step": 10 + }], + "thresholds": "", + "timeFrom": "1m", + "title": "Used", + "type": "singlestat", + "valueFontSize": "50%", + "valueMaps": [{ + "op": "=", + "text": "N/A", + "value": "null" + }], + "valueName": "current" + }, { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(50, 172, 45, 0.97)", + "rgba(237, 129, 40, 0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": "${DS_PROMETHEUS}", + "decimals": 2, + "editable": true, + "error": false, + "format": "bytes", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "height": "1px", + "hideTimeOverride": true, + "id": 10, + "interval": null, + "isNew": true, + "links": [], + "mappingType": 1, + "mappingTypes": [{ + "name": "value to text", + "value": 1 + }, { + "name": "range to text", + "value": 2 + }], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [{ + "from": "null", + "text": "N/A", + "to": "null" + }], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 
189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [{ + "expr": "sum (machine_memory_bytes{instance=~\"^$instance$\"})", + "interval": "10s", + "intervalFactor": 1, + "refId": "A", + "step": 10 + }], + "thresholds": "", + "timeFrom": "1m", + "title": "Total", + "type": "singlestat", + "valueFontSize": "50%", + "valueMaps": [{ + "op": "=", + "text": "N/A", + "value": "null" + }], + "valueName": "current" + }, { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(50, 172, 45, 0.97)", + "rgba(237, 129, 40, 0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": "${DS_PROMETHEUS}", + "decimals": 2, + "editable": true, + "error": false, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "height": "1px", + "hideTimeOverride": true, + "id": 11, + "interval": null, + "isNew": true, + "links": [], + "mappingType": 1, + "mappingTypes": [{ + "name": "value to text", + "value": 1 + }, { + "name": "range to text", + "value": 2 + }], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": " cores", + "postfixFontSize": "30%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [{ + "from": "null", + "text": "N/A", + "to": "null" + }], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [{ + "expr": "sum (rate (container_cpu_usage_seconds_total{id=\"/\",instance=~\"^$instance$\"}[1m]))", + "interval": "10s", + "intervalFactor": 1, + "refId": "A", + "step": 10 + }], + "thresholds": "", + "timeFrom": "1m", + "timeShift": null, + "title": "Used", + "type": "singlestat", + "valueFontSize": "50%", + "valueMaps": [{ + "op": "=", + "text": "N/A", + "value": "null" + }], + "valueName": "current" + }, { + "cacheTimeout": null, + "colorBackground": false, + 
"colorValue": false, + "colors": [ + "rgba(50, 172, 45, 0.97)", + "rgba(237, 129, 40, 0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": "${DS_PROMETHEUS}", + "decimals": 2, + "editable": true, + "error": false, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "height": "1px", + "hideTimeOverride": true, + "id": 12, + "interval": null, + "isNew": true, + "links": [], + "mappingType": 1, + "mappingTypes": [{ + "name": "value to text", + "value": 1 + }, { + "name": "range to text", + "value": 2 + }], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": " cores", + "postfixFontSize": "30%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [{ + "from": "null", + "text": "N/A", + "to": "null" + }], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [{ + "expr": "sum (machine_cpu_cores{instance=~\"^$instance$\"})", + "interval": "10s", + "intervalFactor": 1, + "refId": "A", + "step": 10 + }], + "thresholds": "", + "timeFrom": "1m", + "title": "Total", + "type": "singlestat", + "valueFontSize": "50%", + "valueMaps": [{ + "op": "=", + "text": "N/A", + "value": "null" + }], + "valueName": "current" + }, { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(50, 172, 45, 0.97)", + "rgba(237, 129, 40, 0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": "${DS_PROMETHEUS}", + "decimals": 2, + "editable": true, + "error": false, + "format": "bytes", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "height": "1px", + "hideTimeOverride": true, + "id": 13, + "interval": null, + "isNew": true, + "links": [], + "mappingType": 1, + "mappingTypes": [{ + "name": "value to text", + "value": 1 + }, { + "name": 
"range to text", + "value": 2 + }], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [{ + "from": "null", + "text": "N/A", + "to": "null" + }], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [{ + "expr": "sum(container_fs_usage_bytes{id=\"/\",instance=~\"^$instance$\"})", + "interval": "10s", + "intervalFactor": 1, + "refId": "A", + "step": 10 + }], + "thresholds": "", + "timeFrom": "1m", + "title": "Used", + "type": "singlestat", + "valueFontSize": "50%", + "valueMaps": [{ + "op": "=", + "text": "N/A", + "value": "null" + }], + "valueName": "current" + }, { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(50, 172, 45, 0.97)", + "rgba(237, 129, 40, 0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": "${DS_PROMETHEUS}", + "decimals": 2, + "editable": true, + "error": false, + "format": "bytes", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "height": "1px", + "hideTimeOverride": true, + "id": 14, + "interval": null, + "isNew": true, + "links": [], + "mappingType": 1, + "mappingTypes": [{ + "name": "value to text", + "value": 1 + }, { + "name": "range to text", + "value": 2 + }], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [{ + "from": "null", + "text": "N/A", + "to": "null" + }], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [{ + "expr": "sum (container_fs_limit_bytes{id=\"/\",instance=~\"^$instance$\"})", + "interval": "10s", + "intervalFactor": 1, + 
"refId": "A", + "step": 10 + }], + "thresholds": "", + "timeFrom": "1m", + "title": "Total", + "type": "singlestat", + "valueFontSize": "50%", + "valueMaps": [{ + "op": "=", + "text": "N/A", + "value": "null" + }], + "valueName": "current" + }, { + "aliasColors": {}, + "bars": false, + "datasource": "${DS_PROMETHEUS}", + "decimals": 2, + "editable": true, + "error": false, + "fill": 1, + "grid": { + "threshold1": null, + "threshold1Color": "rgba(216, 200, 27, 0.27)", + "threshold2": null, + "threshold2Color": "rgba(234, 112, 112, 0.22)", + "thresholdLine": false + }, + "height": "200px", + "id": 32, + "isNew": true, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": false, + "min": false, + "rightSide": true, + "show": true, + "sideWidth": 200, + "sort": "current", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 12, + "stack": false, + "steppedLine": false, + "targets": [{ + "expr": "sum(rate(container_network_receive_bytes_total{instance=~\"^$instance$\",namespace=~\"^$namespace$\"}[1m]))", + "interval": "", + "intervalFactor": 2, + "legendFormat": "receive", + "metric": "network", + "refId": "A", + "step": 240 + }, { + "expr": "- sum(rate(container_network_transmit_bytes_total{instance=~\"^$instance$\",namespace=~\"^$namespace$\"}[1m]))", + "interval": "", + "intervalFactor": 2, + "legendFormat": "transmit", + "metric": "network", + "refId": "B", + "step": 240 + }], + "timeFrom": null, + "timeShift": null, + "title": "Network", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "transparent": false, + "type": "graph", + "xaxis": { + "show": true + }, + "yaxes": [{ + "format": "Bps", + "label": "transmit / receive", + "logBase": 1, + "max": null, + "min": null, + "show": 
true + }, { + "format": "Bps", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + }] + }], + "showTitle": true, + "title": "all pods" + }, { + "collapse": false, + "editable": true, + "height": "250px", + "panels": [{ + "aliasColors": {}, + "bars": false, + "datasource": "${DS_PROMETHEUS}", + "decimals": 3, + "editable": true, + "error": false, + "fill": 0, + "grid": { + "threshold1": null, + "threshold1Color": "rgba(216, 200, 27, 0.27)", + "threshold2": null, + "threshold2Color": "rgba(234, 112, 112, 0.22)" + }, + "height": "", + "id": 17, + "isNew": true, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "hideEmpty": true, + "hideZero": true, + "max": false, + "min": false, + "rightSide": true, + "show": true, + "sideWidth": null, + "sort": "current", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 12, + "stack": false, + "steppedLine": false, + "targets": [{ + "expr": "sum(rate(container_cpu_usage_seconds_total{image!=\"\",name=~\"^k8s_.*\",instance=~\"^$instance$\",namespace=~\"^$namespace$\"}[1m])) by (pod_name)", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{ pod_name }}", + "metric": "container_cpu", + "refId": "A", + "step": 240 + }], + "timeFrom": null, + "timeShift": null, + "title": "Cpu Usage", + "tooltip": { + "msResolution": true, + "shared": false, + "sort": 2, + "value_type": "cumulative" + }, + "transparent": false, + "type": "graph", + "xaxis": { + "show": true + }, + "yaxes": [{ + "format": "none", + "label": "cores", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + }] + }, { + "aliasColors": {}, + "bars": false, + "datasource": "${DS_PROMETHEUS}", + 
"decimals": 2, + "editable": true, + "error": false, + "fill": 0, + "grid": { + "threshold1": null, + "threshold1Color": "rgba(216, 200, 27, 0.27)", + "threshold2": null, + "threshold2Color": "rgba(234, 112, 112, 0.22)" + }, + "id": 33, + "isNew": true, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "hideEmpty": true, + "hideZero": true, + "max": false, + "min": false, + "rightSide": true, + "show": true, + "sideWidth": null, + "sort": "current", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 12, + "stack": false, + "steppedLine": false, + "targets": [{ + "expr": "sum (container_memory_working_set_bytes{image!=\"\",name=~\"^k8s_.*\",instance=~\"^$instance$\",namespace=~\"^$namespace$\"}) by (pod_name)", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{ pod_name }}", + "metric": "", + "refId": "A", + "step": 240 + }], + "timeFrom": null, + "timeShift": null, + "title": "Memory Working Set", + "tooltip": { + "msResolution": false, + "shared": false, + "sort": 2, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "show": true + }, + "yaxes": [{ + "format": "bytes", + "label": "used", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + }] + }, { + "aliasColors": {}, + "bars": false, + "datasource": "${DS_PROMETHEUS}", + "decimals": 2, + "editable": true, + "error": false, + "fill": 1, + "grid": { + "threshold1": null, + "threshold1Color": "rgba(216, 200, 27, 0.27)", + "threshold2": null, + "threshold2Color": "rgba(234, 112, 112, 0.22)" + }, + "id": 16, + "isNew": true, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "hideEmpty": true, + "hideZero": true, + "max": 
false, + "min": false, + "rightSide": true, + "show": true, + "sideWidth": 200, + "sort": "avg", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 12, + "stack": false, + "steppedLine": false, + "targets": [{ + "expr": "sum (rate (container_network_receive_bytes_total{image!=\"\",name=~\"^k8s_.*\",instance=~\"^$instance$\",namespace=~\"^$namespace$\"}[1m])) by (pod_name)", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{ pod_name }} < in", + "metric": "network", + "refId": "A", + "step": 240 + }, { + "expr": "- sum (rate (container_network_transmit_bytes_total{image!=\"\",name=~\"^k8s_.*\",instance=~\"^$instance$\",namespace=~\"^$namespace$\"}[1m])) by (pod_name)", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{ pod_name }} > out", + "metric": "network", + "refId": "B", + "step": 240 + }], + "timeFrom": null, + "timeShift": null, + "title": "Network", + "tooltip": { + "msResolution": false, + "shared": false, + "sort": 2, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "show": true + }, + "yaxes": [{ + "format": "Bps", + "label": "transmit / receive", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + }] + }, { + "aliasColors": {}, + "bars": false, + "datasource": "${DS_PROMETHEUS}", + "decimals": 2, + "editable": true, + "error": false, + "fill": 1, + "grid": { + "threshold1": null, + "threshold1Color": "rgba(216, 200, 27, 0.27)", + "threshold2": null, + "threshold2Color": "rgba(234, 112, 112, 0.22)" + }, + "id": 34, + "isNew": true, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "hideEmpty": true, + "hideZero": true, + "max": false, + "min": false, + "rightSide": true, 
+ "show": true, + "sideWidth": 200, + "sort": "current", + "sortDesc": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 12, + "stack": false, + "steppedLine": false, + "targets": [{ + "expr": "sum(container_fs_usage_bytes{image!=\"\",name=~\"^k8s_.*\",instance=~\"^$instance$\",namespace=~\"^$namespace$\"}) by (pod_name)", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{ pod_name }}", + "metric": "network", + "refId": "A", + "step": 240 + }], + "timeFrom": null, + "timeShift": null, + "title": "Filesystem", + "tooltip": { + "msResolution": false, + "shared": false, + "sort": 2, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "show": true + }, + "yaxes": [{ + "format": "bytes", + "label": "used", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + }] + }], + "showTitle": true, + "title": "each pod" + }], + "time": { + "from": "now-3d", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "templating": { + "list": [{ + "allValue": ".*", + "current": {}, + "datasource": "${DS_PROMETHEUS}", + "hide": 0, + "includeAll": true, + "label": "Instance", + "multi": false, + "name": "instance", + "options": [], + "query": "label_values(instance)", + "refresh": 1, + "regex": "", + "type": "query" + }, { + "current": {}, + "datasource": "${DS_PROMETHEUS}", + "hide": 0, + "includeAll": true, + "label": "Namespace", + "multi": true, + "name": "namespace", + "options": [], + "query": "label_values(namespace)", + "refresh": 1, + "regex": "", + 
"type": "query" + }] + }, + "annotations": { + "list": [] + }, + "refresh": false, + "schemaVersion": 12, + "version": 8, + "links": [], + "gnetId": 737 + } + prometheus-datasource.json: | + { + "name": "prometheus", + "type": "prometheus", + "url": "http://prometheus:9090", + "access": "proxy", + "basicAuth": false + } +kind: ConfigMap +metadata: + creationTimestamp: null + name: grafana-import-dashboards + namespace: monitoring +--- +apiVersion: batch/v1 +kind: Job +metadata: + name: grafana-import-dashboards + namespace: monitoring + labels: + app: grafana + component: import-dashboards +spec: + template: + metadata: + name: grafana-import-dashboards + labels: + app: grafana + component: import-dashboards + annotations: + pod.beta.kubernetes.io/init-containers: '[ + { + "name": "wait-for-endpoints", + "image": "giantswarm/tiny-tools", + "imagePullPolicy": "IfNotPresent", + "command": ["fish", "-c", "echo \"waiting for endpoints...\"; while true; set endpoints (curl -s --cacert /var/run/secrets/kubernetes.io/serviceaccount/ca.crt --header \"Authorization: Bearer \"(cat /var/run/secrets/kubernetes.io/serviceaccount/token) https://kubernetes.default.svc/api/v1/namespaces/monitoring/endpoints/grafana); echo $endpoints | jq \".\"; if test (echo $endpoints | jq -r \".subsets[]?.addresses // [] | length\") -gt 0; exit 0; end; echo \"waiting...\";sleep 1; end"], + "args": ["monitoring", "grafana"] + } + ]' + spec: + serviceAccountName: prometheus-k8s + containers: + - name: grafana-import-dashboards + image: giantswarm/tiny-tools + command: ["/bin/sh", "-c"] + workingDir: /opt/grafana-import-dashboards + args: + - > + for file in *-datasource.json ; do + if [ -e "$file" ] ; then + echo "importing $file" && + curl --silent --fail --show-error \ + --request POST http://admin:admin@grafana:80/api/datasources \ + --header "Content-Type: application/json" \ + --data-binary "@$file" ; + echo "" ; + fi + done ; + for file in *-dashboard.json ; do + if [ -e "$file" ] ; then + 
echo "importing $file" && + ( echo '{"dashboard":'; \ + cat "$file"; \ + echo ',"overwrite":true,"inputs":[{"name":"DS_PROMETHEUS","type":"datasource","pluginId":"prometheus","value":"prometheus"}]}' ) \ + | jq -c '.' \ + | curl --silent --fail --show-error \ + --request POST http://admin:admin@grafana:80/api/dashboards/import \ + --header "Content-Type: application/json" \ + --data-binary "@-" ; + echo "" ; + fi + done + + volumeMounts: + - name: config-volume + mountPath: /opt/grafana-import-dashboards + restartPolicy: Never + volumes: + - name: config-volume + configMap: + name: grafana-import-dashboards +--- +# apiVersion: extensions/v1beta1 +# kind: Ingress +# metadata: +# name: grafana +# namespace: monitoring +# spec: +# rules: +# - host: ..k8s.gigantic.io +# http: +# paths: +# - path: / +# backend: +# serviceName: grafana +# servicePort: 3000 +--- +apiVersion: v1 kind: Service metadata: name: grafana namespace: monitoring labels: app: grafana + component: core spec: type: LoadBalancer ports: - - port: 80 - targetPort: http-server - # spec: - # type: NodePort - # ports: - # - port: 3000 + - port: 80 + targetPort: http-server selector: app: grafana + component: core +#spec: +# type: NodePort +# ports: +# - port: 3000 +# selector: +# app: grafana +# component: core +--- +apiVersion: v1 +data: + prometheus.yaml: | + global: + scrape_interval: 10s + scrape_timeout: 10s + evaluation_interval: 10s + rule_files: + - "/etc/prometheus-rules/*.rules" + scrape_configs: + + # https://github.com/prometheus/prometheus/blob/master/documentation/examples/prometheus-kubernetes.yml#L37 + - job_name: 'kubernetes-nodes' + tls_config: + ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt + bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token + kubernetes_sd_configs: + - role: node + relabel_configs: + - source_labels: [__address__] + regex: '(.*):10250' + replacement: '${1}:10255' + target_label: __address__ + + # 
https://github.com/prometheus/prometheus/blob/master/documentation/examples/prometheus-kubernetes.yml#L79 + - job_name: 'kubernetes-endpoints' + kubernetes_sd_configs: + - role: endpoints + relabel_configs: + - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_scrape] + action: keep + regex: true + - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_scheme] + action: replace + target_label: __scheme__ + regex: (https?) + - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_path] + action: replace + target_label: __metrics_path__ + regex: (.+) + - source_labels: [__address__, __meta_kubernetes_service_annotation_prometheus_io_port] + action: replace + target_label: __address__ + regex: (.+)(?::\d+);(\d+) + replacement: $1:$2 + - action: labelmap + regex: __meta_kubernetes_service_label_(.+) + - source_labels: [__meta_kubernetes_namespace] + action: replace + target_label: kubernetes_namespace + - source_labels: [__meta_kubernetes_service_name] + action: replace + target_label: kubernetes_name + + # https://github.com/prometheus/prometheus/blob/master/documentation/examples/prometheus-kubernetes.yml#L119 + - job_name: 'kubernetes-services' + metrics_path: /probe + params: + module: [http_2xx] + kubernetes_sd_configs: + - role: service + relabel_configs: + - source_labels: [__meta_kubernetes_service_annotation_prometheus_io_probe] + action: keep + regex: true + - source_labels: [__address__] + target_label: __param_target + - target_label: __address__ + replacement: blackbox + - source_labels: [__param_target] + target_label: instance + - action: labelmap + regex: __meta_kubernetes_service_label_(.+) + - source_labels: [__meta_kubernetes_namespace] + target_label: kubernetes_namespace + - source_labels: [__meta_kubernetes_service_name] + target_label: kubernetes_name + + # https://github.com/prometheus/prometheus/blob/master/documentation/examples/prometheus-kubernetes.yml#L156 + - job_name: 'kubernetes-pods' + 
kubernetes_sd_configs: + - role: pod + relabel_configs: + - source_labels: [__meta_kubernetes_pod_annotation_prometheus_io_scrape] + action: keep + regex: true + - source_labels: [__meta_kubernetes_pod_annotation_prometheus_io_path] + action: replace + target_label: __metrics_path__ + regex: (.+) + - source_labels: [__address__, __meta_kubernetes_pod_annotation_prometheus_io_port] + action: replace + regex: (.+):(?:\d+);(\d+) + replacement: ${1}:${2} + target_label: __address__ + - action: labelmap + regex: __meta_kubernetes_pod_label_(.+) + - source_labels: [__meta_kubernetes_namespace] + action: replace + target_label: kubernetes_namespace + - source_labels: [__meta_kubernetes_pod_name] + action: replace + target_label: kubernetes_pod_name + - source_labels: [__meta_kubernetes_pod_container_port_number] + action: keep + regex: 9\d{3} +kind: ConfigMap +metadata: + creationTimestamp: null + name: prometheus-core + namespace: monitoring +--- +apiVersion: extensions/v1beta1 +kind: Deployment +metadata: + name: prometheus-core + namespace: monitoring + labels: + app: prometheus + component: core +spec: + replicas: 1 + template: + metadata: + name: prometheus-main + labels: + app: prometheus + component: core + spec: + serviceAccountName: prometheus-k8s + containers: + - name: prometheus + image: prom/prometheus:v1.7.0 + args: + - '-storage.local.retention=12h' + - '-storage.local.memory-chunks=500000' + - '-config.file=/etc/prometheus/prometheus.yaml' + - '-alertmanager.url=http://alertmanager:9093/' + ports: + - name: webui + containerPort: 9090 + resources: + requests: + cpu: 500m + memory: 500M + limits: + cpu: 500m + memory: 500M + volumeMounts: + - name: config-volume + mountPath: /etc/prometheus + - name: rules-volume + mountPath: /etc/prometheus-rules + volumes: + - name: config-volume + configMap: + name: prometheus-core + - name: rules-volume + configMap: + name: prometheus-rules +--- +apiVersion: extensions/v1beta1 +kind: Deployment +metadata: + name: 
kube-state-metrics + namespace: monitoring +spec: + replicas: 2 + template: + metadata: + labels: + app: kube-state-metrics + spec: + serviceAccountName: kube-state-metrics + containers: + - name: kube-state-metrics + image: gcr.io/google_containers/kube-state-metrics:v0.5.0 + ports: + - containerPort: 8080 +--- +# --- +# apiVersion: rbac.authorization.k8s.io/v1beta1 +# kind: ClusterRoleBinding +# metadata: +# name: kube-state-metrics +# roleRef: +# apiGroup: rbac.authorization.k8s.io +# kind: ClusterRole +# name: kube-state-metrics +# subjects: +# - kind: ServiceAccount +# name: kube-state-metrics +# namespace: monitoring +# --- +# apiVersion: rbac.authorization.k8s.io/v1beta1 +# kind: ClusterRole +# metadata: +# name: kube-state-metrics +# rules: +# - apiGroups: [""] +# resources: +# - nodes +# - pods +# - services +# - resourcequotas +# - replicationcontrollers +# - limitranges +# verbs: ["list", "watch"] +# - apiGroups: ["extensions"] +# resources: +# - daemonsets +# - deployments +# - replicasets +# verbs: ["list", "watch"] +# --- +apiVersion: v1 +kind: ServiceAccount +metadata: + name: kube-state-metrics + namespace: monitoring +--- +apiVersion: v1 +kind: Service +metadata: + annotations: + prometheus.io/scrape: 'true' + name: kube-state-metrics + namespace: monitoring + labels: + app: kube-state-metrics +spec: + ports: + - name: kube-state-metrics + port: 8080 + protocol: TCP + selector: + app: kube-state-metrics + +--- +apiVersion: extensions/v1beta1 +kind: DaemonSet +metadata: + name: node-directory-size-metrics + namespace: monitoring + annotations: + description: | + This `DaemonSet` provides metrics in Prometheus format about disk usage on the nodes. + The container `read-du` reads in sizes of all directories below /mnt and writes that to `/tmp/metrics`. It only reports directories larger then `100M` for now. 
+ The other container `caddy` just hands out the contents of that file on request via `http` on `/metrics` at port `9102` which are the defaults for Prometheus. + These are scheduled on every node in the Kubernetes cluster. + To choose directories from the node to check, just mount them on the `read-du` container below `/mnt`. +spec: + template: + metadata: + labels: + app: node-directory-size-metrics + annotations: + prometheus.io/scrape: 'true' + prometheus.io/port: '9102' + description: | + This `Pod` provides metrics in Prometheus format about disk usage on the node. + The container `read-du` reads in sizes of all directories below /mnt and writes that to `/tmp/metrics`. It only reports directories larger then `100M` for now. + The other container `caddy` just hands out the contents of that file on request on `/metrics` at port `9102` which are the defaults for Prometheus. + This `Pod` is scheduled on every node in the Kubernetes cluster. + To choose directories from the node to check just mount them on `read-du` below `/mnt`. 
+ spec: + containers: + - name: read-du + image: giantswarm/tiny-tools + imagePullPolicy: Always + # FIXME threshold via env var + # The + command: + - fish + - --command + - | + touch /tmp/metrics-temp + while true + for directory in (du --bytes --separate-dirs --threshold=100M /mnt) + echo $directory | read size path + echo "node_directory_size_bytes{path=\"$path\"} $size" \ + >> /tmp/metrics-temp + end + mv /tmp/metrics-temp /tmp/metrics + sleep 300 + end + volumeMounts: + - name: host-fs-var + mountPath: /mnt/var + readOnly: true + - name: metrics + mountPath: /tmp + - name: caddy + image: dockermuenster/caddy:0.9.3 + command: + - "caddy" + - "-port=9102" + - "-root=/var/www" + ports: + - containerPort: 9102 + volumeMounts: + - name: metrics + mountPath: /var/www + volumes: + - name: host-fs-var + hostPath: + path: /var + - name: metrics + emptyDir: + medium: Memory +--- +apiVersion: extensions/v1beta1 +kind: DaemonSet +metadata: + name: prometheus-node-exporter + namespace: monitoring + labels: + app: prometheus + component: node-exporter +spec: + template: + metadata: + name: prometheus-node-exporter + labels: + app: prometheus + component: node-exporter + spec: + containers: + - image: prom/node-exporter:v0.14.0 + name: prometheus-node-exporter + ports: + - name: prom-node-exp + #^ must be an IANA_SVC_NAME (at most 15 characters, ..) 
+ containerPort: 9100 + hostPort: 9100 + hostNetwork: true + hostPID: true +--- +apiVersion: v1 +kind: Service +metadata: + annotations: + prometheus.io/scrape: 'true' + name: prometheus-node-exporter + namespace: monitoring + labels: + app: prometheus + component: node-exporter +spec: + clusterIP: None + ports: + - name: prometheus-node-exporter + port: 9100 + protocol: TCP + selector: + app: prometheus + component: node-exporter + type: ClusterIP +--- +apiVersion: v1 +data: + cpu-usage.rules: | + ALERT NodeCPUUsage + IF (100 - (avg by (instance) (irate(node_cpu{name="node-exporter",mode="idle"}[5m])) * 100)) > 75 + FOR 2m + LABELS { + severity="page" + } + ANNOTATIONS { + SUMMARY = "{{$labels.instance}}: High CPU usage detected", + DESCRIPTION = "{{$labels.instance}}: CPU usage is above 75% (current value is: {{ $value }})" + } + instance-availability.rules: | + ALERT InstanceDown + IF up == 0 + FOR 1m + LABELS { severity = "page" } + ANNOTATIONS { + summary = "Instance {{ $labels.instance }} down", + description = "{{ $labels.instance }} of job {{ $labels.job }} has been down for more than 1 minute.", + } + low-disk-space.rules: | + ALERT NodeLowRootDisk + IF ((node_filesystem_size{mountpoint="/root-disk"} - node_filesystem_free{mountpoint="/root-disk"} ) / node_filesystem_size{mountpoint="/root-disk"} * 100) > 75 + FOR 2m + LABELS { + severity="page" + } + ANNOTATIONS { + SUMMARY = "{{$labels.instance}}: Low root disk space", + DESCRIPTION = "{{$labels.instance}}: Root disk usage is above 75% (current value is: {{ $value }})" + } + + ALERT NodeLowDataDisk + IF ((node_filesystem_size{mountpoint="/data-disk"} - node_filesystem_free{mountpoint="/data-disk"} ) / node_filesystem_size{mountpoint="/data-disk"} * 100) > 75 + FOR 2m + LABELS { + severity="page" + } + ANNOTATIONS { + SUMMARY = "{{$labels.instance}}: Low data disk space", + DESCRIPTION = "{{$labels.instance}}: Data disk usage is above 75% (current value is: {{ $value }})" + } + mem-usage.rules: | + ALERT 
NodeSwapUsage + IF (((node_memory_SwapTotal-node_memory_SwapFree)/node_memory_SwapTotal)*100) > 75 + FOR 2m + LABELS { + severity="page" + } + ANNOTATIONS { + SUMMARY = "{{$labels.instance}}: Swap usage detected", + DESCRIPTION = "{{$labels.instance}}: Swap usage usage is above 75% (current value is: {{ $value }})" + } + + ALERT NodeMemoryUsage + IF (((node_memory_MemTotal-node_memory_MemFree-node_memory_Cached)/(node_memory_MemTotal)*100)) > 75 + FOR 2m + LABELS { + severity="page" + } + ANNOTATIONS { + SUMMARY = "{{$labels.instance}}: High memory usage detected", + DESCRIPTION = "{{$labels.instance}}: Memory usage is above 75% (current value is: {{ $value }})" + } +kind: ConfigMap +metadata: + creationTimestamp: null + name: prometheus-rules + namespace: monitoring +--- +--- +apiVersion: rbac.authorization.k8s.io/v1beta1 +kind: ClusterRoleBinding +metadata: + name: prometheus +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: prometheus +subjects: +- kind: ServiceAccount + name: prometheus-k8s + namespace: monitoring +--- +apiVersion: rbac.authorization.k8s.io/v1beta1 +kind: ClusterRole +metadata: + name: prometheus +rules: +- apiGroups: [""] + resources: + - nodes + - services + - endpoints + - pods + verbs: ["get", "list", "watch"] +- apiGroups: [""] + resources: + - configmaps + verbs: ["get"] +- nonResourceURLs: ["/metrics"] + verbs: ["get"] +--- +apiVersion: v1 +kind: ServiceAccount +metadata: + name: prometheus-k8s + namespace: monitoring +--- +apiVersion: v1 +kind: Service +metadata: + name: prometheus + namespace: monitoring + labels: + app: prometheus + component: core + annotations: + prometheus.io/scrape: 'true' +spec: + type: NodePort + ports: + - port: 9090 + protocol: TCP + name: webui + selector: + app: prometheus + component: core From 32f78632fec5a6d2987d3f815993d5ead94cda9a Mon Sep 17 00:00:00 2001 From: havard Date: Tue, 28 Aug 2018 13:19:02 +0530 Subject: [PATCH 03/78] refactor(monitoring): move documentation from 
deploy README.md to ownfile MONITORING.md --- prime/infra/MONITORING.md | 100 ++++++++++++++++++++++++++++++++ prime/infra/README.md | 22 ------- prime/infra/dev/monitoring.yaml | 1 - 3 files changed, 100 insertions(+), 23 deletions(-) create mode 100644 prime/infra/MONITORING.md diff --git a/prime/infra/MONITORING.md b/prime/infra/MONITORING.md new file mode 100644 index 000000000..b6c422464 --- /dev/null +++ b/prime/infra/MONITORING.md @@ -0,0 +1,100 @@ +# Prometheus + +## Setup + +Anything that wants to be scraped by prometheus requires at least one annotation, see the [promotheus section](#prometheus) + +## Access + +### Grafana + +Through external ip: + +```bash +kubectl get services --namespace=monitoring | grep grafana | awk '{print $4}' +``` + +### Prometheus + +Through port forwarding: + +```bash +kubectl port-forward --namespace=monitoring $(kubectl get pods --namespace=monitoring | grep prometheus-core | awk '{print $1}') 9090 +``` + +## Discovery + +Prometheus is configured to do service discovery + +## Scrape + +Prometheus is configured to scrape metrics over https, thus the `ConfigMap` for `prometheus-core` requires path to ca_file from kubernetes secrets + +## [Pushgateway](https://github.com/prometheus/pushgateway) + +Used jobs that might not live long enough to be scraped by prometheus. + +Example usage: +```bash +# Push a metric to pushgateway:8080 (specified in the service declaration for pushgateway) +kubectl run curl-it --image=radial/busyboxplus:curl -i --tty --rm +echo "some_metric 4.71" | curl -v --data-binary @- http://pushgateway:8080/metrics/job/some_job +``` + +## [Monitoring.yaml](dev/monitoring.yaml) + +### Namespace +Defines monitoring namespace + +### [Alert Manager](https://prometheus.io/docs/alerting/alertmanager/) + +__`TODO: Add email config and / or slack api url for alerts to work`__ + +Defines alerts that can be sent by email or to slack. 
+ +Contains two config maps, one defining the alert template and another to configure the alertmanager itself. +There is also a kubernetes Deployment and Service configuration. + +### [Grafana](https://grafana.com/) + +__`TODO: Discuss how grafana should be exposed. With LoadBalancer and / or through existing ingress using auth0 to authenticate users before they can access the dashboard.`__ +__`TODO: Figure out a better way / automatic way to backup dashboards and automatically import them on redeploy`__ + +Contains a Deployment and Service configuration and a ConfigMap with predefined dashboards. + +Grafana is exposed using a LoadBalancer. + +### [](#prometheus)[Prometheus](https://prometheus.io/) + +__`TODO: scraping happens over https thus requires a ca_file, figure out if this is automatically handled or if we need to add a ca file to kubernetes secrets`__ + +Contains a Deployment configuration and a ConfigMap. The ConfigMap defines how and what prometheus scrapes etc. + +There are more configurations at the end of the file, containing ConfigMap for prometheus rules, ClusterRoleBinding, ClusterRole, ServiceAccount and finally the prometheus Service itself. + +The following roles are being scraped: +nodes, endpoints, services, pods + +Given that they set the required annotations which tells prometheus that they should be scaped, see below: + +```yaml +metadata: + annotations: + prometheus.io/scrape: 'true' # REQUIRED: has to be set for prometheus to scrape + prometheus.io/port: '9102' # OPTIONAL: defaults to '9102' + prometheus.io/path: '' # OPTIONAL: defaults to '/metrics' + prometheus.io/scheme: '' # OPTIONAL: http or https defaults to 'https' +``` + +### [Kube State Metrics](https://github.com/kubernetes/kube-state-metrics) + +See the above link for documentation. + +### Extra Prometheus Metrics + +Some DaemonSets that define different prometheus metrics, not sure if this is a general config or if its connected to any of the other configurations. 
+ + + + + diff --git a/prime/infra/README.md b/prime/infra/README.md index bf35840b7..c2aae1578 100644 --- a/prime/infra/README.md +++ b/prime/infra/README.md @@ -244,31 +244,9 @@ Based on https://github.com/giantswarm/kubernetes-prometheus ```bash kubectl apply -f prime/infra/dev/monitoring.yaml - -# kubectl apply -f prime/infra/dev/monitoring-pushgateway.yaml ``` -#### Prometheus dashboard -```bash -kubectl port-forward --namespace=monitoring $(kubectl get pods --namespace=monitoring | grep prometheus-core | awk '{print $1}') 9090 -``` - -#### Grafana dashboard -__`Has own its own load balancer and can be accessed directly. Discuss if this is OK or find and implement a different way of accessing the grafana dashboard.`__ - -Can be accessed directly from external ip -```bash -kubectl get services --namespace=monitoring | grep grafana | awk '{print $4}' -``` - -#### Push gateway -```bash -# Push a metric to pushgateway:8080 (specified in the service declaration for pushgateway) -kubectl run curl-it --image=radial/busyboxplus:curl -i --tty --rm -echo "some_metric 4.71" | curl -v --data-binary @- http://pushgateway:8080/metrics/job/some_job -``` - ### Setup Neo4j ```bash diff --git a/prime/infra/dev/monitoring.yaml b/prime/infra/dev/monitoring.yaml index 1aef71c0b..cbcf2a979 100644 --- a/prime/infra/dev/monitoring.yaml +++ b/prime/infra/dev/monitoring.yaml @@ -2791,7 +2791,6 @@ metadata: name: prometheus-rules namespace: monitoring --- ---- apiVersion: rbac.authorization.k8s.io/v1beta1 kind: ClusterRoleBinding metadata: From e4de44987bd522ec1fe2fb8eee4aa338675380ac Mon Sep 17 00:00:00 2001 From: havard Date: Tue, 28 Aug 2018 13:21:05 +0530 Subject: [PATCH 04/78] chore(monitoring): add reference to origin of monitoring.yaml --- prime/infra/MONITORING.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/prime/infra/MONITORING.md b/prime/infra/MONITORING.md index b6c422464..fd6e59598 100644 --- a/prime/infra/MONITORING.md +++ b/prime/infra/MONITORING.md @@ -43,6 +43,8 
@@ echo "some_metric 4.71" | curl -v --data-binary @- http://pushgateway:8080/metr ## [Monitoring.yaml](dev/monitoring.yaml) +Is completely based on manifests-all.yaml from this (github repo)[https://github.com/giantswarm/kubernetes-prometheus] + ### Namespace Defines monitoring namespace From d49c9236cc612b87b61beb2cb8b8442496702e66 Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Thu, 30 Aug 2018 12:32:00 +0200 Subject: [PATCH 05/78] WIP refactor OcsEvent --- .../prime/analytics/DataConsumptionInfo.kt | 11 +++- .../prime/disruptor/EventProducerImpl.kt | 17 ++---- .../org/ostelco/prime/disruptor/OcsEvent.kt | 37 +++---------- .../org/ostelco/prime/ocs/EventHandlerImpl.kt | 54 ++++++++++--------- .../prime/disruptor/PrimeEventProducerTest.kt | 12 +++-- 5 files changed, 56 insertions(+), 75 deletions(-) diff --git a/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt b/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt index 09f3d6366..07520e980 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt @@ -2,8 +2,9 @@ package org.ostelco.prime.analytics import com.lmax.disruptor.EventHandler import org.ostelco.prime.analytics.PrimeMetric.MEGABYTES_CONSUMED -import org.ostelco.prime.disruptor.EventMessageType.CREDIT_CONTROL_REQUEST +import org.ostelco.ocs.api.CreditControlRequestType import org.ostelco.prime.disruptor.OcsEvent +import org.ostelco.prime.disruptor.EventMessageType.CREDIT_CONTROL_REQUEST import org.ostelco.prime.logger import org.ostelco.prime.module.getResource @@ -34,6 +35,14 @@ class DataConsumptionInfo() : EventHandler { analyticsReporter.reportMetric( primeMetric = MEGABYTES_CONSUMED, value = event.usedBucketBytes / 1_000_000) + + event.request?.let { + if(it.type == CreditControlRequestType.INITIAL_REQUEST) { + logger.info("MSISDN : {} connected", it.msisdn) + } else if (it.type == 
CreditControlRequestType.TERMINATION_REQUEST) { + logger.info("MSISDN : {} disconnected", it.msisdn) + } + } } } } diff --git a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducerImpl.kt b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducerImpl.kt index 47edee8e0..67c84f5a1 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducerImpl.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducerImpl.kt @@ -49,11 +49,8 @@ class EventProducerImpl(private val ringBuffer: RingBuffer) : EventPro requestedBytes: Long = 0, usedBytes: Long = 0, reservedBytes: Long = 0, - serviceId: Long = 0, - ratingGroup: Long = 0, - reportingReason: ReportingReason = ReportingReason.UNRECOGNIZED, streamId: String? = null, - requestId: String? = null) { + request: CreditControlRequestInfo? = null) { processNextEventOnTheRingBuffer( Consumer { event -> @@ -65,11 +62,8 @@ class EventProducerImpl(private val ringBuffer: RingBuffer) : EventPro requestedBytes, usedBytes, reservedBytes, - serviceId, - ratingGroup, - reportingReason, streamId, - requestId) + request) }) } @@ -101,7 +95,7 @@ class EventProducerImpl(private val ringBuffer: RingBuffer) : EventPro injectIntoRingBuffer(CREDIT_CONTROL_REQUEST, request.msisdn, streamId = streamId, - requestId = request.requestId) + request = request) } else { // FIXME vihang: For now we assume that there is only 1 MSCC in the Request. 
injectIntoRingBuffer(CREDIT_CONTROL_REQUEST, @@ -109,11 +103,8 @@ class EventProducerImpl(private val ringBuffer: RingBuffer) : EventPro requestedBytes = request.getMscc(0).requested.totalOctets, usedBytes = request.getMscc(0).used.totalOctets, reservedBytes = 0, - serviceId = request.getMscc(0).serviceIdentifier, - ratingGroup = request.getMscc(0).ratingGroup, - reportingReason = request.getMscc(0).reportingReason, streamId = streamId, - requestId = request.requestId) + request = request) } } diff --git a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt index 815a76669..09b31cfcf 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt @@ -1,5 +1,6 @@ package org.ostelco.prime.disruptor +import org.ostelco.ocs.api.CreditControlRequestInfo import org.ostelco.ocs.api.ReportingReason class OcsEvent { @@ -55,26 +56,9 @@ class OcsEvent { var ocsgwStreamId: String? = null /** - * Request ID used by OCS gateway to correlate response with requests + * Credit-Control-Request from OCS */ - var ocsgwRequestId: String? = null - - - /** - * Service-Identifier is used to classify traffic - */ - var serviceIdentifier: Long = 0 - - /** - * Rating-Group is used to classify traffic - */ - var ratingGroup: Long = 0 - - /** - * Reporting-Reason - * // FIXME martin: This is the Reporting-Reason for the MSCC. The PrimeEvent might be to generic since there is also Reporting-Reason used on ServiceUnit level - */ - var reportingReason: ReportingReason = ReportingReason.UNRECOGNIZED + var request: CreditControlRequestInfo? = null; fun clear() { messageType = null @@ -91,10 +75,7 @@ class OcsEvent { bundleBytes = 0 ocsgwStreamId = null - ocsgwRequestId = null - serviceIdentifier = 0 - ratingGroup = 0 - reportingReason = ReportingReason.UNRECOGNIZED + request = null } //FIXME vihang: We need to think about roaming!!! 
@@ -108,11 +89,8 @@ class OcsEvent { requestedBytes: Long, usedBytes: Long, reservedBucketBytes: Long, - serviceIdentifier: Long, - ratingGroup: Long, - reportingReason: ReportingReason, ocsgwStreamId: String?, - ocsgwRequestId: String?) { + request: CreditControlRequestInfo?) { this.messageType = messageType this.msisdn = msisdn this.bundleId = bundleId @@ -121,10 +99,7 @@ class OcsEvent { this.requestedBucketBytes = requestedBytes this.usedBucketBytes = usedBytes this.reservedBucketBytes = reservedBucketBytes - this.serviceIdentifier = serviceIdentifier - this.ratingGroup = ratingGroup - this.reportingReason = reportingReason this.ocsgwStreamId = ocsgwStreamId - this.ocsgwRequestId = ocsgwRequestId + this.request = request } } diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt index 2402c9727..e87953fe0 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt @@ -56,8 +56,7 @@ internal class EventHandlerImpl(private val ocsService: OcsService) : EventHandl logger.info("reserved bytes: {}", event.reservedBucketBytes) logger.info("used bytes: {}", event.usedBucketBytes) logger.info("bundle bytes: {}", event.bundleBytes) - logger.info("Reporting reason: {}", event.reportingReason) - logger.info("request id: {} ",event.ocsgwRequestId) + logger.info("request id: {} ",event.request?.requestId) } private fun handleCreditControlRequest(event: OcsEvent) { @@ -70,33 +69,36 @@ internal class EventHandlerImpl(private val ocsService: OcsService) : EventHandl try { val creditControlAnswer = CreditControlAnswerInfo.newBuilder() .setMsisdn(event.msisdn) - .setRequestId(event.ocsgwRequestId) - - // This is a hack to know when we have received an MSCC in the request or not. - // For Terminate request we might not have any MSCC and therefore no serviceIdentifier. 
- if (event.serviceIdentifier > 0) { - val msccBuilder = MultipleServiceCreditControl.newBuilder() - msccBuilder.setServiceIdentifier(event.serviceIdentifier) - .setRatingGroup(event.ratingGroup) - .setValidityTime(86400) - - if ((event.reportingReason != ReportingReason.FINAL) && (event.requestedBucketBytes > 0)) { - msccBuilder.granted = ServiceUnit.newBuilder() - .setTotalOctets(event.reservedBucketBytes) - .build() - if (event.reservedBucketBytes < event.requestedBucketBytes) { - msccBuilder.finalUnitIndication = FinalUnitIndication.newBuilder() - .setFinalUnitAction(FinalUnitAction.TERMINATE) - .setIsSet(true) + + event.request?.let { + // This is a hack to know when we have received an MSCC in the request or not. + // For Terminate request we might not have any MSCC and therefore no serviceIdentifier. + if (it.getMscc(0).serviceIdentifier > 0) { + val msccBuilder = MultipleServiceCreditControl.newBuilder() + msccBuilder.setServiceIdentifier(it.getMscc(0).serviceIdentifier) + .setRatingGroup(it.getMscc(0).ratingGroup) + .setValidityTime(86400) + + if ((it.getMscc(0).reportingReason != ReportingReason.FINAL) && (event.requestedBucketBytes > 0)) { + msccBuilder.granted = ServiceUnit.newBuilder() + .setTotalOctets(event.reservedBucketBytes) + .build() + if (event.reservedBucketBytes < event.requestedBucketBytes) { + msccBuilder.finalUnitIndication = FinalUnitIndication.newBuilder() + .setFinalUnitAction(FinalUnitAction.TERMINATE) + .setIsSet(true) + .build() + } + } else { + // Use -1 to indicate no granted service unit should be included in the answer + msccBuilder.granted = ServiceUnit.newBuilder() + .setTotalOctets(-1) .build() } - } else { - // Use -1 to indicate no granted service unit should be included in the answer - msccBuilder.granted = ServiceUnit.newBuilder() - .setTotalOctets(-1) - .build() + creditControlAnswer.addMscc(msccBuilder.build()) } - creditControlAnswer.addMscc(msccBuilder.build()) + + creditControlAnswer.setRequestId(it.requestId) } val 
streamId = event.ocsgwStreamId diff --git a/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt b/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt index e410bae73..88a00dce6 100644 --- a/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt +++ b/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt @@ -89,8 +89,8 @@ class PrimeEventProducerTest { .setTotalOctets(REQUESTED_BYTES) .build()) .setUsed(ServiceUnit.newBuilder().setTotalOctets(USED_BYTES).build()) - .setRatingGroup(10) - .setServiceIdentifier(1) + .setRatingGroup(RATING_GROUP) + .setServiceIdentifier(SERVICE_IDENTIFIER) .build() ).build() @@ -100,8 +100,8 @@ class PrimeEventProducerTest { assertEquals(MSISDN, event.msisdn) assertEquals(REQUESTED_BYTES, event.requestedBucketBytes) assertEquals(USED_BYTES, event.usedBucketBytes) - assertEquals(10, event.ratingGroup) - assertEquals(1, event.serviceIdentifier) + assertEquals(RATING_GROUP, event.request?.getMscc(0)?.ratingGroup) + assertEquals(SERVICE_IDENTIFIER, event.request?.getMscc(0)?.serviceIdentifier) assertEquals(STREAM_ID, event.ocsgwStreamId) assertEquals(CREDIT_CONTROL_REQUEST, event.messageType) } @@ -123,6 +123,10 @@ class PrimeEventProducerTest { private const val RING_BUFFER_SIZE = 256 private const val TIMEOUT = 10 + + private const val RATING_GROUP = 10L; + + private const val SERVICE_IDENTIFIER = 1L; } } From 9a1d9c7f762879a43fe42e127e38718a9e2feb74 Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Thu, 30 Aug 2018 12:57:23 +0200 Subject: [PATCH 06/78] Refactor usedUnits --- .../org/ostelco/prime/analytics/DataConsumptionInfo.kt | 4 ++-- .../org/ostelco/prime/disruptor/EventProducerImpl.kt | 3 --- .../main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt | 8 -------- .../main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt | 2 +- ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt | 2 +- .../org/ostelco/prime/disruptor/PrimeEventProducerTest.kt | 2 +- 
6 files changed, 5 insertions(+), 16 deletions(-) diff --git a/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt b/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt index 07520e980..1523f25e1 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt @@ -30,11 +30,11 @@ class DataConsumptionInfo() : EventHandler { logger.info("Sent DataConsumptionInfo event to analytics") analyticsReporter.reportTrafficInfo( msisdn = event.msisdn!!, - usedBytes = event.usedBucketBytes, + usedBytes = event.request?.getMscc(0)?.used?.totalOctets ?: 0L, bundleBytes = event.bundleBytes) analyticsReporter.reportMetric( primeMetric = MEGABYTES_CONSUMED, - value = event.usedBucketBytes / 1_000_000) + value = (event.request?.getMscc(0)?.used?.totalOctets ?: 0L) / 1_000_000) event.request?.let { if(it.type == CreditControlRequestType.INITIAL_REQUEST) { diff --git a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducerImpl.kt b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducerImpl.kt index 67c84f5a1..7f381a627 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducerImpl.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducerImpl.kt @@ -47,7 +47,6 @@ class EventProducerImpl(private val ringBuffer: RingBuffer) : EventPro bundleId: String? = null, bundleBytes: Long = 0, requestedBytes: Long = 0, - usedBytes: Long = 0, reservedBytes: Long = 0, streamId: String? = null, request: CreditControlRequestInfo? 
= null) { @@ -60,7 +59,6 @@ class EventProducerImpl(private val ringBuffer: RingBuffer) : EventPro emptyList(), bundleBytes, requestedBytes, - usedBytes, reservedBytes, streamId, request) @@ -101,7 +99,6 @@ class EventProducerImpl(private val ringBuffer: RingBuffer) : EventPro injectIntoRingBuffer(CREDIT_CONTROL_REQUEST, msisdn = request.msisdn, requestedBytes = request.getMscc(0).requested.totalOctets, - usedBytes = request.getMscc(0).used.totalOctets, reservedBytes = 0, streamId = streamId, request = request) diff --git a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt index 09b31cfcf..94c01a519 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt @@ -33,11 +33,6 @@ class OcsEvent { */ var requestedBucketBytes: Long = 0 - /** - * Bytes that has been used from the bucket (previously reserved). - */ - var usedBucketBytes: Long = 0 - /** * Buckets that has been reserved from the bundle. @@ -70,7 +65,6 @@ class OcsEvent { bundleBytes = 0 requestedBucketBytes = 0 - usedBucketBytes = 0 reservedBucketBytes = 0 bundleBytes = 0 @@ -87,7 +81,6 @@ class OcsEvent { msisdnToppedUp: List, bundleBytes: Long, requestedBytes: Long, - usedBytes: Long, reservedBucketBytes: Long, ocsgwStreamId: String?, request: CreditControlRequestInfo?) 
{ @@ -97,7 +90,6 @@ class OcsEvent { this.msisdnToppedUp = msisdnToppedUp this.bundleBytes = bundleBytes this.requestedBucketBytes = requestedBytes - this.usedBucketBytes = usedBytes this.reservedBucketBytes = reservedBucketBytes this.ocsgwStreamId = ocsgwStreamId this.request = request diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt index e87953fe0..d657e7b72 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt @@ -54,7 +54,7 @@ internal class EventHandlerImpl(private val ocsService: OcsService) : EventHandl logger.info("MSISDN: {}", event.msisdn) logger.info("requested bytes: {}", event.requestedBucketBytes) logger.info("reserved bytes: {}", event.reservedBucketBytes) - logger.info("used bytes: {}", event.usedBucketBytes) + logger.info("used bytes: {}", event.request?.getMscc(0)?.used?.totalOctets ?: 0L) logger.info("bundle bytes: {}", event.bundleBytes) logger.info("request id: {} ",event.request?.requestId) } diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt index b44a14084..b93baab9d 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt @@ -41,7 +41,7 @@ class OcsState(val loadSubscriberInfo:Boolean = true) : EventHandler { logger.error("Received null as msisdn") return } - consumeDataBytes(msisdn, event.usedBucketBytes) + consumeDataBytes(msisdn, event.request?.getMscc(0)?.used?.totalOctets ?: 0L) event.reservedBucketBytes = reserveDataBytes( msisdn, event.requestedBucketBytes) diff --git a/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt b/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt index 88a00dce6..80e4dfe06 100644 --- 
a/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt +++ b/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt @@ -99,7 +99,7 @@ class PrimeEventProducerTest { val event = collectedEvent assertEquals(MSISDN, event.msisdn) assertEquals(REQUESTED_BYTES, event.requestedBucketBytes) - assertEquals(USED_BYTES, event.usedBucketBytes) + assertEquals(USED_BYTES, event.request?.getMscc(0)?.used?.totalOctets ?: 0L) assertEquals(RATING_GROUP, event.request?.getMscc(0)?.ratingGroup) assertEquals(SERVICE_IDENTIFIER, event.request?.getMscc(0)?.serviceIdentifier) assertEquals(STREAM_ID, event.ocsgwStreamId) From 98a5db6819a5fbb5c2e0bd5dc0d9708a032e410e Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Thu, 30 Aug 2018 15:16:48 +0200 Subject: [PATCH 07/78] Refactor requested and topup bytes --- .../prime/disruptor/EventProducerImpl.kt | 32 +++++++------------ .../org/ostelco/prime/disruptor/OcsEvent.kt | 25 ++++++--------- .../org/ostelco/prime/ocs/EventHandlerImpl.kt | 18 +++++------ .../kotlin/org/ostelco/prime/ocs/OcsState.kt | 4 +-- .../prime/disruptor/PrimeEventProducerTest.kt | 4 +-- 5 files changed, 33 insertions(+), 50 deletions(-) diff --git a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducerImpl.kt b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducerImpl.kt index 7f381a627..644d4fe70 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducerImpl.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/EventProducerImpl.kt @@ -46,10 +46,10 @@ class EventProducerImpl(private val ringBuffer: RingBuffer) : EventPro msisdn: String? = null, bundleId: String? = null, bundleBytes: Long = 0, - requestedBytes: Long = 0, reservedBytes: Long = 0, streamId: String? = null, - request: CreditControlRequestInfo? = null) { + request: CreditControlRequestInfo? = null, + topUpBytes: Long? 
= 0) { processNextEventOnTheRingBuffer( Consumer { event -> @@ -58,10 +58,10 @@ class EventProducerImpl(private val ringBuffer: RingBuffer) : EventPro bundleId, emptyList(), bundleBytes, - requestedBytes, reservedBytes, streamId, - request) + request, + topUpBytes) }) } @@ -72,7 +72,7 @@ class EventProducerImpl(private val ringBuffer: RingBuffer) : EventPro injectIntoRingBuffer( type = TOPUP_DATA_BUNDLE_BALANCE, bundleId = bundleId, - requestedBytes = bytes) + topUpBytes = bytes) } override fun releaseReservedDataBucketEvent( @@ -81,28 +81,18 @@ class EventProducerImpl(private val ringBuffer: RingBuffer) : EventPro injectIntoRingBuffer( type = RELEASE_RESERVED_BUCKET, - msisdn = msisdn, - requestedBytes = bytes) + msisdn = msisdn) } override fun injectCreditControlRequestIntoRingbuffer( request: CreditControlRequestInfo, streamId: String) { - if (request.msccList.isEmpty()) { - injectIntoRingBuffer(CREDIT_CONTROL_REQUEST, - request.msisdn, - streamId = streamId, - request = request) - } else { - // FIXME vihang: For now we assume that there is only 1 MSCC in the Request. - injectIntoRingBuffer(CREDIT_CONTROL_REQUEST, - msisdn = request.msisdn, - requestedBytes = request.getMscc(0).requested.totalOctets, - reservedBytes = 0, - streamId = streamId, - request = request) - } + injectIntoRingBuffer(CREDIT_CONTROL_REQUEST, + msisdn = request.msisdn, + reservedBytes = 0, + streamId = streamId, + request = request) } override fun addBundle(bundle: Bundle) { diff --git a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt index 94c01a519..0d5be6883 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt @@ -26,14 +26,6 @@ class OcsEvent { */ var msisdnToppedUp: List? = null - /** - * Origin of word 'bucket' - P-GW consumes data in `buckets` of 10 MB ~ 100 MB at a time - * This field is used in. 
- * Request to reserve a new bucket of bytes - */ - var requestedBucketBytes: Long = 0 - - /** * Buckets that has been reserved from the bundle. */ @@ -55,21 +47,22 @@ class OcsEvent { */ var request: CreditControlRequestInfo? = null; + /** + * Topup amount for bundle + */ + var topUpBytes: Long? = 0; + fun clear() { messageType = null - msisdn = null bundleId = null - msisdnToppedUp = null - bundleBytes = 0 - requestedBucketBytes = 0 reservedBucketBytes = 0 bundleBytes = 0 - ocsgwStreamId = null request = null + topUpBytes = 0; } //FIXME vihang: We need to think about roaming!!! @@ -80,18 +73,18 @@ class OcsEvent { bundleId: String?, msisdnToppedUp: List, bundleBytes: Long, - requestedBytes: Long, reservedBucketBytes: Long, ocsgwStreamId: String?, - request: CreditControlRequestInfo?) { + request: CreditControlRequestInfo?, + topUpBytes: Long?) { this.messageType = messageType this.msisdn = msisdn this.bundleId = bundleId this.msisdnToppedUp = msisdnToppedUp this.bundleBytes = bundleBytes - this.requestedBucketBytes = requestedBytes this.reservedBucketBytes = reservedBucketBytes this.ocsgwStreamId = ocsgwStreamId this.request = request + this.topUpBytes = topUpBytes } } diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt index d657e7b72..21397c1c7 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt @@ -52,10 +52,11 @@ internal class EventHandlerImpl(private val ocsService: OcsService) : EventHandl private fun logEventProcessing(msg: String, event: OcsEvent) { logger.info("{}", msg) logger.info("MSISDN: {}", event.msisdn) - logger.info("requested bytes: {}", event.requestedBucketBytes) + logger.info("requested bytes: {}", event.request?.getMscc(0)?.requested?.totalOctets ?: 0L) logger.info("reserved bytes: {}", event.reservedBucketBytes) logger.info("used bytes: {}", 
event.request?.getMscc(0)?.used?.totalOctets ?: 0L) logger.info("bundle bytes: {}", event.bundleBytes) + logger.info("topup bytes: {}", event.topUpBytes) logger.info("request id: {} ",event.request?.requestId) } @@ -70,20 +71,20 @@ internal class EventHandlerImpl(private val ocsService: OcsService) : EventHandl val creditControlAnswer = CreditControlAnswerInfo.newBuilder() .setMsisdn(event.msisdn) - event.request?.let { + event.request?.let { request -> // This is a hack to know when we have received an MSCC in the request or not. // For Terminate request we might not have any MSCC and therefore no serviceIdentifier. - if (it.getMscc(0).serviceIdentifier > 0) { + if (request.getMscc(0).serviceIdentifier > 0) { val msccBuilder = MultipleServiceCreditControl.newBuilder() - msccBuilder.setServiceIdentifier(it.getMscc(0).serviceIdentifier) - .setRatingGroup(it.getMscc(0).ratingGroup) + msccBuilder.setServiceIdentifier(request.getMscc(0).serviceIdentifier) + .setRatingGroup(request.getMscc(0).ratingGroup) .setValidityTime(86400) - if ((it.getMscc(0).reportingReason != ReportingReason.FINAL) && (event.requestedBucketBytes > 0)) { + if ((request.getMscc(0).reportingReason != ReportingReason.FINAL) && (request.getMscc(0).requested.totalOctets > 0)) { msccBuilder.granted = ServiceUnit.newBuilder() .setTotalOctets(event.reservedBucketBytes) .build() - if (event.reservedBucketBytes < event.requestedBucketBytes) { + if (event.reservedBucketBytes < request.getMscc(0).requested.totalOctets) { msccBuilder.finalUnitIndication = FinalUnitIndication.newBuilder() .setFinalUnitAction(FinalUnitAction.TERMINATE) .setIsSet(true) @@ -97,8 +98,7 @@ internal class EventHandlerImpl(private val ocsService: OcsService) : EventHandl } creditControlAnswer.addMscc(msccBuilder.build()) } - - creditControlAnswer.setRequestId(it.requestId) + creditControlAnswer.setRequestId(request.requestId) } val streamId = event.ocsgwStreamId diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt 
b/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt index b93baab9d..1e2ccd218 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt @@ -44,7 +44,7 @@ class OcsState(val loadSubscriberInfo:Boolean = true) : EventHandler { consumeDataBytes(msisdn, event.request?.getMscc(0)?.used?.totalOctets ?: 0L) event.reservedBucketBytes = reserveDataBytes( msisdn, - event.requestedBucketBytes) + event.request?.getMscc(0)?.requested?.totalOctets ?: 0L) event.bundleId = msisdnToBundleIdMap[msisdn] event.bundleBytes = bundleBalanceMap[event.bundleId] ?: 0 } @@ -54,7 +54,7 @@ class OcsState(val loadSubscriberInfo:Boolean = true) : EventHandler { logger.error("Received null as bundleId") return } - event.bundleBytes = addDataBundleBytes(bundleId, event.requestedBucketBytes) + event.bundleBytes = addDataBundleBytes(bundleId, event.topUpBytes ?: 0L) event.msisdnToppedUp = bundleIdToMsisdnMap[bundleId]?.toList() } RELEASE_RESERVED_BUCKET -> { diff --git a/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt b/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt index 80e4dfe06..e986c2f8c 100644 --- a/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt +++ b/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt @@ -77,7 +77,7 @@ class PrimeEventProducerTest { // Verify some behavior assertEquals(BUNDLE_ID, event.bundleId) - assertEquals(NO_OF_TOPUP_BYTES, event.requestedBucketBytes) + assertEquals(NO_OF_TOPUP_BYTES, event.topUpBytes) assertEquals(TOPUP_DATA_BUNDLE_BALANCE, event.messageType) } @@ -98,7 +98,7 @@ class PrimeEventProducerTest { val event = collectedEvent assertEquals(MSISDN, event.msisdn) - assertEquals(REQUESTED_BYTES, event.requestedBucketBytes) + assertEquals(REQUESTED_BYTES, event.request?.getMscc(0)?.requested?.totalOctets ?: 0L) assertEquals(USED_BYTES, event.request?.getMscc(0)?.used?.totalOctets ?: 0L) 
assertEquals(RATING_GROUP, event.request?.getMscc(0)?.ratingGroup) assertEquals(SERVICE_IDENTIFIER, event.request?.getMscc(0)?.serviceIdentifier) From 5261237250831ba83d6d0a0519b165c776f6ddfe Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Thu, 30 Aug 2018 16:38:30 +0200 Subject: [PATCH 08/78] Update logs --- .../prime/analytics/DataConsumptionInfo.kt | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt b/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt index 1523f25e1..9e7bfd867 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt @@ -36,11 +36,17 @@ class DataConsumptionInfo() : EventHandler { primeMetric = MEGABYTES_CONSUMED, value = (event.request?.getMscc(0)?.used?.totalOctets ?: 0L) / 1_000_000) - event.request?.let { - if(it.type == CreditControlRequestType.INITIAL_REQUEST) { - logger.info("MSISDN : {} connected", it.msisdn) - } else if (it.type == CreditControlRequestType.TERMINATION_REQUEST) { - logger.info("MSISDN : {} disconnected", it.msisdn) + event.request?.let { request -> + if(request.type == CreditControlRequestType.INITIAL_REQUEST) { + logger.info("MSISDN : {} connected apn {} sgsn_mcc_mnc {}", + request.msisdn, + request.serviceInformation.psInformation.calledStationId, + request.serviceInformation.psInformation.sgsnMccMnc) + } else if (request.type == CreditControlRequestType.TERMINATION_REQUEST) { + logger.info("MSISDN : {} disconnected apn {} sgsn_mcc_mnc", + request.msisdn, + request.serviceInformation.psInformation.calledStationId, + request.serviceInformation.psInformation.sgsnMccMnc) } } } From 7b30ec100b6c42604605276026283817cd6515e4 Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Fri, 31 Aug 2018 14:31:56 +0200 Subject: [PATCH 09/78] Added keep-alive for metrics grpc stream --- 
.../ostelco/ocsgw/data/grpc/OcsgwMetrics.java | 26 ++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java b/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java index 6facc6699..ea516025f 100644 --- a/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java +++ b/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java @@ -40,6 +40,8 @@ public class OcsgwMetrics { private ScheduledFuture initAnalyticsFuture = null; + private ScheduledFuture keepAliveFuture = null; + private int lastActiveSessions = 0; public OcsgwMetrics(String metricsServerHostname, ServiceAccountJwtAccessCredentials credentials) { @@ -82,6 +84,13 @@ public final void onCompleted() { } } + private void reconnectKeepAlive() { + LOG.info("reconnectKeepAlive called"); + if (keepAliveFuture != null) { + keepAliveFuture.cancel(true); + } + } + private void reconnectAnalyticsReport() { LOG.info("reconnectAnalyticsReport called"); @@ -91,6 +100,7 @@ private void reconnectAnalyticsReport() { LOG.info("Schedule new Callable initAnalyticsRequest"); initAnalyticsFuture = executorService.schedule((Callable) () -> { + reconnectKeepAlive(); LOG.info("Calling initAnalyticsRequest"); initAnalyticsRequest(); sendAnalytics(lastActiveSessions); @@ -100,7 +110,7 @@ private void reconnectAnalyticsReport() { TimeUnit.SECONDS); } - public void initAnalyticsRequest() { + void initAnalyticsRequest() { ocsgwAnalyticsReport = ocsgwAnalyticsServiceStub.ocsgwAnalyticsEvent( new AnalyticsRequestObserver() { @@ -110,11 +120,21 @@ public void onNext(OcsgwAnalyticsReply value) { } } ); + initKeepAlive(); + } + + private void initKeepAlive() { + // this is used to keep connection alive + keepAliveFuture = executorService.scheduleWithFixedDelay(() -> { + sendAnalytics(lastActiveSessions); + }, + 15, + 50, + TimeUnit.SECONDS); } - public void sendAnalytics(int size) { + void sendAnalytics(int size) { 
ocsgwAnalyticsReport.onNext(OcsgwAnalyticsReport.newBuilder().setActiveSessions(size).build()); lastActiveSessions = size; } - } \ No newline at end of file From 67db1d8280d87d3367e7b4e452897de9fda55505 Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Fri, 31 Aug 2018 17:00:25 +0200 Subject: [PATCH 10/78] BiqQuery Table Partition and Schema changes. --- .../src/main/proto/analytics.proto | 2 +- dataflow-pipelines/README.md | 6 +----- dataflow-pipelines/build.gradle | 8 +++++++- dataflow-pipelines/docker-compose.yaml | 2 +- dataflow-pipelines/script/start.sh | 2 +- .../DataConsumptionPipelineDefinition.kt | 18 +++++++---------- .../ostelco/dataflow/pipelines/io/BigQuery.kt | 13 ++++++------ .../src/main/resources/table_schema.ddl | 19 ++++++++++++++++++ .../pipelines/ConsumptionPerMsisdnTest.kt | 20 +++++-------------- 9 files changed, 49 insertions(+), 41 deletions(-) create mode 100644 dataflow-pipelines/src/main/resources/table_schema.ddl diff --git a/analytics-grpc-api/src/main/proto/analytics.proto b/analytics-grpc-api/src/main/proto/analytics.proto index 6ba8085b9..f2d68aa83 100644 --- a/analytics-grpc-api/src/main/proto/analytics.proto +++ b/analytics-grpc-api/src/main/proto/analytics.proto @@ -19,5 +19,5 @@ message DataTrafficInfo { message AggregatedDataTrafficInfo { string msisdn = 1; uint64 dataBytes = 2; - string dateTime = 3; + google.protobuf.Timestamp timestamp = 3; } \ No newline at end of file diff --git a/dataflow-pipelines/README.md b/dataflow-pipelines/README.md index 87c729277..87aa7b4e8 100644 --- a/dataflow-pipelines/README.md +++ b/dataflow-pipelines/README.md @@ -7,11 +7,7 @@ ## Package - gradle clean shadowJar - -With unit testing: - - gradle clean test shadowJar + gradle clean build ## Deploy to GCP diff --git a/dataflow-pipelines/build.gradle b/dataflow-pipelines/build.gradle index ea2eb9480..59461ef65 100644 --- a/dataflow-pipelines/build.gradle +++ b/dataflow-pipelines/build.gradle @@ -6,12 +6,18 @@ plugins { } dependencies { - 
implementation project(':analytics-grpc-api') + implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion" + + implementation project(':analytics-grpc-api') + implementation "com.google.cloud:google-cloud-pubsub:$googleCloudVersion" + implementation 'com.google.cloud.dataflow:google-cloud-dataflow-java-sdk-all:2.5.0' runtimeOnly 'org.apache.beam:beam-runners-google-cloud-dataflow-java:2.5.0' + implementation 'ch.qos.logback:logback-classic:1.2.3' + testImplementation "org.jetbrains.kotlin:kotlin-test-junit:$kotlinVersion" testRuntimeOnly 'org.hamcrest:hamcrest-all:1.3' } diff --git a/dataflow-pipelines/docker-compose.yaml b/dataflow-pipelines/docker-compose.yaml index c892a375e..d5a8058b9 100644 --- a/dataflow-pipelines/docker-compose.yaml +++ b/dataflow-pipelines/docker-compose.yaml @@ -1,7 +1,7 @@ version: "3.7" services: - analytics: + dataflow-pipelines: container_name: dataflow-pipelines build: . environment: diff --git a/dataflow-pipelines/script/start.sh b/dataflow-pipelines/script/start.sh index c98400eb0..9ee58596d 100755 --- a/dataflow-pipelines/script/start.sh +++ b/dataflow-pipelines/script/start.sh @@ -3,4 +3,4 @@ # Start app exec java \ -Dfile.encoding=UTF-8 \ - -jar /analytics.jar + -jar /dataflow-pipelines.jar diff --git a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/DataConsumptionPipelineDefinition.kt b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/DataConsumptionPipelineDefinition.kt index 34b0abf3c..0eaa0cc39 100644 --- a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/DataConsumptionPipelineDefinition.kt +++ b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/DataConsumptionPipelineDefinition.kt @@ -5,6 +5,7 @@ import org.apache.beam.sdk.Pipeline import org.apache.beam.sdk.coders.KvCoder import org.apache.beam.sdk.coders.VarLongCoder import org.apache.beam.sdk.extensions.protobuf.ProtoCoder +import 
org.apache.beam.sdk.io.gcp.bigquery.TableRowJsonCoder import org.apache.beam.sdk.transforms.Combine import org.apache.beam.sdk.transforms.Filter import org.apache.beam.sdk.transforms.GroupByKey @@ -27,9 +28,6 @@ import org.ostelco.dataflow.pipelines.io.Table.RAW_CONSUMPTION import org.ostelco.dataflow.pipelines.io.convertToHourlyTableRows import org.ostelco.dataflow.pipelines.io.convertToRawTableRows import org.ostelco.dataflow.pipelines.io.readFromPubSub -import java.time.ZoneOffset -import java.time.ZonedDateTime -import java.time.format.DateTimeFormatter object DataConsumptionPipelineDefinition : PipelineDefinition { @@ -53,12 +51,14 @@ object DataConsumptionPipelineDefinition : PipelineDefinition { // PubSubEvents -> raw_consumption big-query dataTrafficInfoEvents .apply("convertToRawTableRows", convertToRawTableRows) + .setCoder(TableRowJsonCoder.of()) .apply("saveRawEventsToBigQuery", saveToBigQuery(RAW_CONSUMPTION)) // PubSubEvents -> aggregate by hour -> hourly_consumption big-query dataTrafficInfoEvents .apply("TotalDataConsumptionGroupByMsisdn", consumptionPerMsisdn) .apply("convertToHourlyTableRows", convertToHourlyTableRows) + .setCoder(TableRowJsonCoder.of()) .apply("saveToBigQueryGroupedByHour", saveToBigQuery(HOURLY_CONSUMPTION)) } } @@ -80,16 +80,11 @@ val consumptionPerMsisdn = object : PTransform, PCo .discardingFiredPanes() val toKeyValuePair = ParDoFn.transform> { - val zonedDateTime = ZonedDateTime - .ofInstant(java.time.Instant.ofEpochMilli(Timestamps.toMillis(it.timestamp)), ZoneOffset.UTC) - .withMinute(0) - .withSecond(0) - .withNano(0) - val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:SS") + val hoursSinceEpoch: Long = it.timestamp.seconds / 3600 KV.of( AggregatedDataTrafficInfo.newBuilder() .setMsisdn(it.msisdn) - .setDateTime(formatter.format(zonedDateTime)) + .setTimestamp(Timestamps.fromSeconds(hoursSinceEpoch * 3600)) .setDataBytes(0) .build(), it.bucketBytes) @@ -100,7 +95,7 @@ val consumptionPerMsisdn = object : 
PTransform, PCo val kvToSingleObject = ParDoFn.transform, AggregatedDataTrafficInfo> { AggregatedDataTrafficInfo.newBuilder() .setMsisdn(it.key?.msisdn) - .setDateTime(it.key?.dateTime) + .setTimestamp(it.key?.timestamp) .setDataBytes(it.value) .build() } @@ -119,5 +114,6 @@ val consumptionPerMsisdn = object : PTransform, PCo // sum for each group .apply("reduceToSumOfBucketBytes", reduceToSumOfBucketBytes) .apply("kvToSingleObject", kvToSingleObject) + .setCoder(ProtoCoder.of(AggregatedDataTrafficInfo::class.java)) } } \ No newline at end of file diff --git a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/io/BigQuery.kt b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/io/BigQuery.kt index b65e8e4c7..6316583e7 100644 --- a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/io/BigQuery.kt +++ b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/io/BigQuery.kt @@ -3,6 +3,7 @@ package org.ostelco.dataflow.pipelines.io import com.google.api.services.bigquery.model.TableFieldSchema import com.google.api.services.bigquery.model.TableRow import com.google.api.services.bigquery.model.TableSchema +import com.google.protobuf.Timestamp import com.google.protobuf.util.Timestamps import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO import org.ostelco.analytics.api.AggregatedDataTrafficInfo @@ -58,7 +59,7 @@ private object TableSchemas { val fields = ArrayList() fields.add(TableFieldSchema().setName("msisdn").setType("STRING")) fields.add(TableFieldSchema().setName("bytes").setType("INTEGER")) - fields.add(TableFieldSchema().setName("timestamp").setType("DATETIME")) + fields.add(TableFieldSchema().setName("timestamp").setType("TIMESTAMP")) TableSchema().setFields(fields) } } @@ -73,18 +74,19 @@ val convertToRawTableRows = ParDoFn.transform { .set("msisdn", it.msisdn) .set("bucketBytes", it.bucketBytes) .set("bundleBytes", it.bundleBytes) - .set("timestamp", ZonedDateTime.ofInstant( - 
Instant.ofEpochMilli(Timestamps.toMillis(it.timestamp)), - ZoneOffset.UTC).toString()) + .set("timestamp", protobufTimestampToZonedDateTime(it.timestamp)) } val convertToHourlyTableRows = ParDoFn.transform { TableRow() .set("msisdn", it.msisdn) .set("bytes", it.dataBytes) - .set("timestamp", it.dateTime) + .set("timestamp", protobufTimestampToZonedDateTime(it.timestamp)) } +fun protobufTimestampToZonedDateTime(timestamp: Timestamp) = ZonedDateTime.ofInstant( + Instant.ofEpochMilli(Timestamps.toMillis(timestamp)), + ZoneOffset.UTC).toString() // // Save to BigQuery Table // @@ -102,7 +104,6 @@ object BigQueryIOUtils { return BigQueryIO.writeTableRows() .to("$project:$dataset.${table.name.toLowerCase()}") .withSchema(TableSchemas.getTableSchema(table)) - .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED) .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND) } } diff --git a/dataflow-pipelines/src/main/resources/table_schema.ddl b/dataflow-pipelines/src/main/resources/table_schema.ddl new file mode 100644 index 000000000..2e6d229b3 --- /dev/null +++ b/dataflow-pipelines/src/main/resources/table_schema.ddl @@ -0,0 +1,19 @@ +CREATE TABLE IF NOT EXISTS +`pantel-2decb.data_consumption.hourly_consumption` +( + msisdn STRING NOT NULL, + bytes INT64 NOT NULL, + timestamp TIMESTAMP NOT NULL +) +PARTITION BY DATE(timestamp); + + +CREATE TABLE IF NOT EXISTS +`pantel-2decb.data_consumption.raw_consumption` +( + msisdn STRING NOT NULL, + bucketBytes INT64 NOT NULL, + bundleBytes INT64 NOT NULL, + timestamp TIMESTAMP NOT NULL +) +PARTITION BY DATE(timestamp); \ No newline at end of file diff --git a/dataflow-pipelines/src/test/kotlin/org/ostelco/dataflow/pipelines/ConsumptionPerMsisdnTest.kt b/dataflow-pipelines/src/test/kotlin/org/ostelco/dataflow/pipelines/ConsumptionPerMsisdnTest.kt index 0b26d7bc7..b8ac4121c 100644 --- a/dataflow-pipelines/src/test/kotlin/org/ostelco/dataflow/pipelines/ConsumptionPerMsisdnTest.kt +++ 
b/dataflow-pipelines/src/test/kotlin/org/ostelco/dataflow/pipelines/ConsumptionPerMsisdnTest.kt @@ -1,5 +1,6 @@ package org.ostelco.dataflow.pipelines +import com.google.protobuf.Timestamp import com.google.protobuf.util.Timestamps import org.apache.beam.sdk.extensions.protobuf.ProtoCoder import org.apache.beam.sdk.testing.NeedsRunner @@ -14,9 +15,6 @@ import org.junit.experimental.categories.Category import org.ostelco.analytics.api.AggregatedDataTrafficInfo import org.ostelco.analytics.api.DataTrafficInfo import org.ostelco.dataflow.pipelines.definitions.consumptionPerMsisdn -import java.time.ZoneOffset -import java.time.ZonedDateTime -import java.time.format.DateTimeFormatter class ConsumptionPerMsisdnTest { @@ -80,21 +78,13 @@ class ConsumptionPerMsisdnTest { .setCoder(ProtoCoder.of(AggregatedDataTrafficInfo::class.java)) PAssert.that(out).containsInAnyOrder( - AggregatedDataTrafficInfo.newBuilder().setMsisdn("123").setDataBytes(300).setDateTime(currentHourDateTime).build(), - AggregatedDataTrafficInfo.newBuilder().setMsisdn("456").setDataBytes(200).setDateTime(currentHourDateTime).build(), - AggregatedDataTrafficInfo.newBuilder().setMsisdn("789").setDataBytes(100).setDateTime(currentHourDateTime).build()) + AggregatedDataTrafficInfo.newBuilder().setMsisdn("123").setDataBytes(300).setTimestamp(currentHourDateTime).build(), + AggregatedDataTrafficInfo.newBuilder().setMsisdn("456").setDataBytes(200).setTimestamp(currentHourDateTime).build(), + AggregatedDataTrafficInfo.newBuilder().setMsisdn("789").setDataBytes(100).setTimestamp(currentHourDateTime).build()) pipeline.run().waitUntilFinish() } } - private fun getCurrentHourDateTime(): String { - val zonedDateTime = ZonedDateTime - .ofInstant(java.time.Instant.now(), ZoneOffset.UTC) - .withMinute(0) - .withSecond(0) - .withNano(0) - val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:SS") - return formatter.format(zonedDateTime) - } + private fun getCurrentHourDateTime(): Timestamp = 
Timestamps.fromSeconds((java.time.Instant.now().epochSecond / 3600) * 3600) } \ No newline at end of file From a4efa27a636ff7c45d46a1b3e93eb254e01e76c0 Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Mon, 3 Sep 2018 16:50:22 +0200 Subject: [PATCH 11/78] Moved purchaseProduct from SubscriberDAOImpl to neo4j-store --- .../org/ostelco/at/common/StripePayment.kt | 15 +- .../kotlin/org/ostelco/at/jersey/Tests.kt | 55 +++++- .../kotlin/org/ostelco/at/okhttp/Tests.kt | 2 +- .../prime/client/api/metrics/Metrics.kt | 3 - .../client/api/store/SubscriberDAOImpl.kt | 92 ++-------- .../ostelco/prime/storage/graph/Neo4jStore.kt | 167 ++++++++++++++++-- .../StripePaymentProcessor.kt | 12 +- .../paymentprocessor/PaymentProcessor.kt | 16 +- .../org/ostelco/prime/storage/Variants.kt | 7 + 9 files changed, 259 insertions(+), 110 deletions(-) diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt index 42602b971..4805f4773 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt @@ -2,10 +2,12 @@ package org.ostelco.at.common import com.stripe.Stripe import com.stripe.model.Customer +import com.stripe.model.Source import com.stripe.model.Token object StripePayment { - fun createPaymentSourceId(): String { + + fun createPaymentTokenId(): String { // https://stripe.com/docs/api/java#create_card_token Stripe.apiKey = System.getenv("STRIPE_API_KEY") @@ -21,6 +23,17 @@ object StripePayment { return token.id } + fun createPaymentSourceId(): String { + + // https://stripe.com/docs/api/java#create_source + Stripe.apiKey = System.getenv("STRIPE_API_KEY") + + // TODO martin: set valid map values + val sourceMap = mapOf() + val source = Source.create(sourceMap) + return source.id + } + fun deleteAllCustomers() { // https://stripe.com/docs/api/java#create_card_token Stripe.apiKey = 
System.getenv("STRIPE_API_KEY") diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt index 91186ed16..27d5f8354 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt @@ -10,6 +10,7 @@ import org.ostelco.at.common.randomInt import org.ostelco.prime.client.model.ActivePseudonyms import org.ostelco.prime.client.model.ApplicationToken import org.ostelco.prime.client.model.Consent +import org.ostelco.prime.client.model.PaymentSource import org.ostelco.prime.client.model.Person import org.ostelco.prime.client.model.Price import org.ostelco.prime.client.model.Product @@ -240,7 +241,7 @@ class PurchaseTest { val balanceBefore = subscriptionStatusBefore.remaining val productSku = "1GB_249NOK" - val sourceId = StripePayment.createPaymentSourceId() + val sourceId = StripePayment.createPaymentTokenId() post { path = "/products/$productSku/purchase" @@ -269,6 +270,58 @@ class PurchaseTest { assertEquals(expectedProducts().first(), purchaseRecords.last().product, "Incorrect 'Product' in purchase record") } + @Test + fun `jersey test - POST products purchase using default source`() { + + StripePayment.deleteAllCustomers() + + val email = "purchase-${randomInt()}@test.com" + createProfile(name = "Test Purchase User with Default Payment Source", email = email) + + val sourceId = StripePayment.createPaymentTokenId() + + val paymentSource:PaymentSource = post { + path = "/paymentSources" + subscriberId = email + queryParams = mapOf("sourceId" to sourceId) + } + + assertNotNull(paymentSource.id, message = "Failed to create payment source") + + val subscriptionStatusBefore: SubscriptionStatus = get { + path = "/subscription/status" + subscriberId = email + } + val balanceBefore = subscriptionStatusBefore.remaining + + val productSku = "1GB_249NOK" + + post { + path = "/products/$productSku/purchase" + 
subscriberId = email + } + + Thread.sleep(100) // wait for 100 ms for balance to be updated in db + + val subscriptionStatusAfter: SubscriptionStatus = get { + path = "/subscription/status" + subscriberId = email + } + val balanceAfter = subscriptionStatusAfter.remaining + + assertEquals(1_000_000_000, balanceAfter - balanceBefore, "Balance did not increased by 1GB after Purchase") + + val purchaseRecords: PurchaseRecordList = get { + path = "/purchases" + subscriberId = email + } + + purchaseRecords.sortBy { it.timestamp } + + assert(Instant.now().toEpochMilli() - purchaseRecords.last().timestamp < 10_000) { "Missing Purchase Record" } + assertEquals(expectedProducts().first(), purchaseRecords.last().product, "Incorrect 'Product' in purchase record") + } + @Test fun `jersey test - POST products purchase without payment`() { diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt index 673645bad..1232427be 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt @@ -177,7 +177,7 @@ class PurchaseTest { val balanceBefore = client.subscriptionStatus.remaining - val sourceId = StripePayment.createPaymentSourceId() + val sourceId = StripePayment.createPaymentTokenId() client.purchaseProduct("1GB_249NOK", sourceId, false) diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/metrics/Metrics.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/metrics/Metrics.kt index 51bd24737..9bd73a7c8 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/metrics/Metrics.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/metrics/Metrics.kt @@ -19,7 +19,4 @@ fun reportMetricsAtStartUp() { fun updateMetricsOnNewSubscriber() { analyticsService.reportMetric(TOTAL_USERS, adminStore.getSubscriberCount()) 
analyticsService.reportMetric(USERS_ACQUIRED_THROUGH_REFERRALS, adminStore.getReferredSubscriberCount()) -} -fun updateMetricsOnPurchase() { - analyticsService.reportMetric(USERS_PAID_AT_LEAST_ONCE, adminStore.getPaidSubscriberCount()) } \ No newline at end of file diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt index 1d9d2bca0..ff2c6927b 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt @@ -1,12 +1,10 @@ package org.ostelco.prime.client.api.store import arrow.core.Either -import arrow.core.Tuple4 import arrow.core.flatMap import org.ostelco.prime.analytics.AnalyticsService import org.ostelco.prime.analytics.PrimeMetric.REVENUE import org.ostelco.prime.client.api.metrics.updateMetricsOnNewSubscriber -import org.ostelco.prime.client.api.metrics.updateMetricsOnPurchase import org.ostelco.prime.client.api.model.Consent import org.ostelco.prime.client.api.model.Person import org.ostelco.prime.client.api.model.SubscriptionStatus @@ -198,7 +196,6 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu } } - @Deprecated("use purchaseProduct", ReplaceWith("purchaseProduct")) override fun purchaseProductWithoutPayment(subscriberId: String, sku: String): Either { return getProduct(subscriberId, sku) @@ -227,85 +224,16 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu } } - override fun purchaseProduct(subscriberId: String, sku: String, sourceId: String?, saveCard: Boolean): Either { - return getProduct(subscriberId, sku) - // If we can't find the product, return not-found - .mapLeft { NotFoundError("Product unavailable") } - .flatMap { product: Product -> - // Fetch/Create stripe payment profile for the subscriber. 
- getPaymentProfile(subscriberId) - .fold( - { createAndStorePaymentProfile(subscriberId) }, - { profileInfo -> Either.right(profileInfo) } - ) - .map { profileInfo -> Pair(product, profileInfo) } - } - .flatMap { (product, profileInfo) -> - // Add payment source - if (sourceId != null) { - paymentProcessor.addSource(profileInfo.id, sourceId). - map {sourceInfo -> Triple(product, profileInfo, sourceInfo.id)} - } else { - Either.right(Triple(product, profileInfo, null)) - } - } - .flatMap { (product, profileInfo, savedSourceId) -> - // Authorize stripe charge for this purchase - val price = product.price - paymentProcessor.authorizeCharge(profileInfo.id, savedSourceId, price.amount, price.currency) - .mapLeft { apiError -> - logger.error("failed to authorize purchase for customerId ${profileInfo.id}, sourceId $savedSourceId, sku $sku") - apiError - } - .map { chargeId -> Tuple4(profileInfo, savedSourceId, chargeId, product) } - } - .flatMap { (profileInfo, savedSourceId, chargeId, product) -> - val purchaseRecord = PurchaseRecord( - id = chargeId, - product = product, - timestamp = Instant.now().toEpochMilli(), - msisdn = "") - // Create purchase record - storage.addPurchaseRecord(subscriberId, purchaseRecord) - .mapLeft { storeError -> - logger.error("failed to save purchase record, for customerId ${profileInfo.id}, chargeId $chargeId, payment will be unclaimed in Stripe") - BadGatewayError(storeError.message) - } - // Notify OCS - .flatMap { - //TODO: Handle errors (when it becomes available) - ocsSubscriberService.topup(subscriberId, sku) - Either.right(Tuple4(profileInfo, savedSourceId, chargeId, product)) - } - } - .flatMap { (profileInfo, savedSourceId, chargeId, product) -> - // Capture the charge, our database have been updated. 
- paymentProcessor.captureCharge(chargeId, profileInfo.id, sourceId) - .mapLeft { apiError -> - logger.error("Capture failed for customerId ${profileInfo.id}, chargeId $chargeId, Fix this in Stripe Dashborad") - apiError - } - .map { - // TODO vihang: handle currency conversion - analyticsReporter.reportMetric(REVENUE, product.price.amount.toLong()) - updateMetricsOnPurchase() - Triple(profileInfo, savedSourceId, ProductInfo(product.sku)) - } - } - .flatMap { (profileInfo, savedSourceId, productInfo) -> - // Remove the payment source - if (!saveCard && savedSourceId != null) { - paymentProcessor.removeSource(profileInfo.id, savedSourceId) - .mapLeft { apiError -> - logger.error("Failed to remove card, for customerId ${profileInfo.id}, sourceId $sourceId") - apiError - } - .map { productInfo } - } else { - Either.Right(productInfo) - } - } - } + override fun purchaseProduct( + subscriberId: String, + sku: String, + sourceId: String?, + saveCard: Boolean): Either = + storage.purchaseProduct( + subscriberId, + sku, + sourceId, + saveCard) override fun getReferrals(subscriberId: String): Either> { return try { diff --git a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt index b2f109993..ea71d45d0 100644 --- a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt +++ b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt @@ -1,8 +1,14 @@ package org.ostelco.prime.storage.graph import arrow.core.Either +import arrow.core.Tuple4 import arrow.core.flatMap import org.neo4j.driver.v1.Transaction +import org.ostelco.prime.analytics.AnalyticsService +import org.ostelco.prime.analytics.PrimeMetric.REVENUE +import org.ostelco.prime.analytics.PrimeMetric.USERS_PAID_AT_LEAST_ONCE +import org.ostelco.prime.core.ApiError +import org.ostelco.prime.core.BadGatewayError import org.ostelco.prime.logger import org.ostelco.prime.model.Bundle import 
org.ostelco.prime.model.Offer @@ -14,6 +20,11 @@ import org.ostelco.prime.model.Subscriber import org.ostelco.prime.model.Subscription import org.ostelco.prime.module.getResource import org.ostelco.prime.ocs.OcsAdminService +import org.ostelco.prime.ocs.OcsSubscriberService +import org.ostelco.prime.paymentprocessor.PaymentProcessor +import org.ostelco.prime.paymentprocessor.core.ProductInfo +import org.ostelco.prime.paymentprocessor.core.ProfileInfo +import org.ostelco.prime.storage.DocumentStore import org.ostelco.prime.storage.GraphStore import org.ostelco.prime.storage.NotFoundError import org.ostelco.prime.storage.StoreError @@ -47,7 +58,7 @@ class Neo4jStore : GraphStore by Neo4jStoreSingleton object Neo4jStoreSingleton : GraphStore { - private val ocs: OcsAdminService by lazy { getResource() } + private val ocsAdminService: OcsAdminService by lazy { getResource() } private val logger by logger() // @@ -130,6 +141,7 @@ object Neo4jStoreSingleton : GraphStore { readTransaction { subscriberStore.get(subscriberId, transaction) } // TODO vihang: Move this logic to DSL + Rule Engine + Triggers, when they are ready + // >> BEGIN override fun addSubscriber(subscriber: Subscriber, referredBy: String?): Either = writeTransaction { if (subscriber.id == referredBy) { @@ -158,7 +170,7 @@ object Neo4jStoreSingleton : GraphStore { } } .flatMap { - ocs.addBundle(Bundle(bundleId, 1_000_000_000)) + ocsAdminService.addBundle(Bundle(bundleId, 1_000_000_000)) Either.right(Unit) } } else { @@ -176,14 +188,15 @@ object Neo4jStoreSingleton : GraphStore { } } .flatMap { - ocs.addBundle(Bundle(bundleId, 100_000_000)) + ocsAdminService.addBundle(Bundle(bundleId, 100_000_000)) Either.right(Unit) } }.flatMap { subscriberToBundleStore.create(subscriber.id, bundleId, transaction) } .flatMap { subscriberToSegmentStore.create(subscriber.id, "all", transaction) } .ifFailedThenRollback(transaction) } - + // << END + override fun updateSubscriber(subscriber: Subscriber): Either = 
writeTransaction { subscriberStore.update(subscriber, transaction) .ifFailedThenRollback(transaction) @@ -232,7 +245,7 @@ object Neo4jStoreSingleton : GraphStore { either.flatMap { _ -> subscriptionToBundleStore.create(subscription, bundle, transaction) .flatMap { - ocs.addMsisdnToBundleMapping(msisdn, bundle.id) + ocsAdminService.addMsisdnToBundleMapping(msisdn, bundle.id) Either.right(Unit) } } @@ -282,30 +295,147 @@ object Neo4jStoreSingleton : GraphStore { override fun getProduct(subscriberId: String, sku: String): Either { return readTransaction { - subscriberStore.exists(subscriberId, transaction) - .flatMap { - read(""" + getProduct(subscriberId, sku, transaction) + } + } + + private fun getProduct(subscriberId: String, sku: String, transaction: Transaction): Either { + return subscriberStore.exists(subscriberId, transaction) + .flatMap { + read(""" MATCH (:${subscriberEntity.name} {id: '$subscriberId'}) -[:${subscriberToSegmentRelation.relation.name}]->(:${segmentEntity.name}) <-[:${offerToSegmentRelation.relation.name}]-(:${offerEntity.name}) -[:${offerToProductRelation.relation.name}]->(product:${productEntity.name} {sku: '$sku'}) RETURN product; """.trimIndent(), - transaction) { statementResult -> - if (statementResult.hasNext()) { - Either.right(productEntity.createEntity(statementResult.single().get("product").asMap())) - } else { - Either.left(NotFoundError(type = productEntity.name, id = sku)) - } + transaction) { statementResult -> + if (statementResult.hasNext()) { + Either.right(productEntity.createEntity(statementResult.single().get("product").asMap())) + } else { + Either.left(NotFoundError(type = productEntity.name, id = sku)) } } - } + } } // // Purchase Records // + // TODO vihang: Move this logic to DSL + Rule Engine + Triggers, when they are ready + // >> BEGIN + private val documentStore by lazy { getResource() } + private val paymentProcessor by lazy { getResource() } + private val ocs by lazy { getResource() } + private val 
analyticsReporter by lazy { getResource() } + + private fun getPaymentProfile(name: String): Either = + documentStore.getPaymentId(name) + ?.let { profileInfoId -> Either.right(ProfileInfo(profileInfoId)) } + ?: Either.left(BadGatewayError("Failed to fetch payment customer ID")) + + private fun createAndStorePaymentProfile(name: String): Either { + return paymentProcessor.createPaymentProfile(name) + .flatMap { profileInfo -> + setPaymentProfile(name, profileInfo) + .map { profileInfo } + } + } + + private fun setPaymentProfile(name: String, profileInfo: ProfileInfo): Either = + Either.cond( + test = documentStore.createPaymentId(name, profileInfo.id), + ifTrue = { Unit }, + ifFalse = { BadGatewayError("Failed to save payment customer ID") }) + + override fun purchaseProduct( + subscriberId: String, + sku: String, + sourceId: String?, + saveCard: Boolean): Either = writeTransaction { + + val result = getProduct(subscriberId, sku, transaction) + // If we can't find the product, return not-found + .mapLeft { org.ostelco.prime.core.NotFoundError("Product unavailable") } + .flatMap { product: Product -> + // Fetch/Create stripe payment profile for the subscriber. 
+ getPaymentProfile(subscriberId) + .fold( + { createAndStorePaymentProfile(subscriberId) }, + { profileInfo -> Either.right(profileInfo) } + ) + .map { profileInfo -> Pair(product, profileInfo) } + } + .flatMap { (product, profileInfo) -> + // Add payment source + if (sourceId != null) { + paymentProcessor.addSource(profileInfo.id, sourceId).map { sourceInfo -> Triple(product, profileInfo, sourceInfo.id) } + } else { + Either.right(Triple(product, profileInfo, null)) + } + } + .flatMap { (product, profileInfo, savedSourceId) -> + // Authorize stripe charge for this purchase + val price = product.price + //TODO: If later steps fail, then refund the authorized charge + paymentProcessor.authorizeCharge(profileInfo.id, savedSourceId, price.amount, price.currency) + .mapLeft { apiError -> + logger.error("failed to authorize purchase for customerId ${profileInfo.id}, sourceId $savedSourceId, sku $sku") + apiError + } + .map { chargeId -> Tuple4(profileInfo, savedSourceId, chargeId, product) } + } + .flatMap { (profileInfo, savedSourceId, chargeId, product) -> + val purchaseRecord = PurchaseRecord( + id = chargeId, + product = product, + timestamp = Instant.now().toEpochMilli(), + msisdn = "") + // Create purchase record + createPurchaseRecordRelation(subscriberId, purchaseRecord, transaction) + .mapLeft { storeError -> + paymentProcessor.refundCharge(chargeId, profileInfo.id) + logger.error("failed to save purchase record, for customerId ${profileInfo.id}, chargeId $chargeId, payment will be unclaimed in Stripe") + BadGatewayError(storeError.message) + } + // Notify OCS + .flatMap { + //TODO vihang: Handle errors (when it becomes available) + ocs.topup(subscriberId, sku) + // TODO vihang: handle currency conversion + analyticsReporter.reportMetric(REVENUE, product.price.amount.toLong()) + analyticsReporter.reportMetric(USERS_PAID_AT_LEAST_ONCE, getPaidSubscriberCount(transaction)) + Either.right(Tuple4(profileInfo, savedSourceId, chargeId, product)) + } + } + .mapLeft { 
error -> + transaction.failure() + error + } + + result.map { (profileInfo, _, chargeId, _) -> + // Capture the charge, our database have been updated. + paymentProcessor.captureCharge(chargeId, profileInfo.id) + .mapLeft { + // TODO payment: retry capture charge + logger.error("Capture failed for customerId ${profileInfo.id}, chargeId $chargeId, Fix this in Stripe Dashboard") + } + } + result.map { (profileInfo, savedSourceId, _, _) -> + // Remove the payment source + if (!saveCard && savedSourceId != null) { + paymentProcessor.removeSource(profileInfo.id, savedSourceId) + .mapLeft { apiError -> + logger.error("Failed to remove card, for customerId ${profileInfo.id}, sourceId $sourceId") + apiError + } + } + } + result.map { (_, _, _, product) -> ProductInfo(product.sku) } + } + // << END + override fun getPurchaseRecords(subscriberId: String): Either> { return readTransaction { subscriberStore.getRelations(subscriberId, purchaseRecordRelation, transaction) @@ -429,7 +559,11 @@ object Neo4jStoreSingleton : GraphStore { } override fun getPaidSubscriberCount(): Long = readTransaction { - read(""" + getPaidSubscriberCount(transaction) + } + + private fun getPaidSubscriberCount(transaction: Transaction): Long { + return read(""" MATCH (subscriber:${subscriberEntity.name})-[:${purchaseRecordRelation.relation.name}]->(product:${productEntity.name}) WHERE product.`price/amount` > 0 RETURN count(subscriber) AS count @@ -438,7 +572,6 @@ object Neo4jStoreSingleton : GraphStore { result.single().get("count").asLong() } } - // // Stores // diff --git a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt index f8afb287b..5049da518 100644 --- a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt +++ b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt @@ 
-12,7 +12,11 @@ import org.ostelco.prime.core.BadGatewayError import org.ostelco.prime.core.ForbiddenError import org.ostelco.prime.core.NotFoundError import org.ostelco.prime.logger -import org.ostelco.prime.paymentprocessor.core.* +import org.ostelco.prime.paymentprocessor.core.PlanInfo +import org.ostelco.prime.paymentprocessor.core.ProductInfo +import org.ostelco.prime.paymentprocessor.core.ProfileInfo +import org.ostelco.prime.paymentprocessor.core.SourceInfo +import org.ostelco.prime.paymentprocessor.core.SubscriptionInfo class StripePaymentProcessor : PaymentProcessor { @@ -139,7 +143,7 @@ class StripePaymentProcessor : PaymentProcessor { } } - override fun captureCharge(chargeId: String, customerId: String, sourceId: String?): Either { + override fun captureCharge(chargeId: String, customerId: String): Either { val errorMessage = "Failed to capture charge for customerId $customerId chargeId $chargeId" return either(ForbiddenError(errorMessage)) { Charge.retrieve(chargeId) @@ -161,6 +165,10 @@ class StripePaymentProcessor : PaymentProcessor { } } + override fun refundCharge(chargeId: String, customerId: String): Either { + TODO("payment") + } + override fun removeSource(customerId: String, sourceId: String): Either = either(ForbiddenError("Failed to remove source ${sourceId} from customer ${customerId}")) { Customer.retrieve(customerId).sources.retrieve(sourceId).delete().id diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt index d68c9ecf0..74af17b8b 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt +++ b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt @@ -2,7 +2,11 @@ package org.ostelco.prime.paymentprocessor import arrow.core.Either import org.ostelco.prime.core.ApiError -import org.ostelco.prime.paymentprocessor.core.* +import 
org.ostelco.prime.paymentprocessor.core.PlanInfo +import org.ostelco.prime.paymentprocessor.core.ProductInfo +import org.ostelco.prime.paymentprocessor.core.ProfileInfo +import org.ostelco.prime.paymentprocessor.core.SourceInfo +import org.ostelco.prime.paymentprocessor.core.SubscriptionInfo interface PaymentProcessor { @@ -104,10 +108,16 @@ interface PaymentProcessor { /** * @param chargeId ID of the of the authorized charge from authorizeCharge() * @param customerId Customer id in the payment system - * @param sourceId id of the payment source * @return id of the charge if authorization was successful */ - fun captureCharge(chargeId: String, customerId: String, sourceId: String?): Either + fun captureCharge(chargeId: String, customerId: String): Either + + /** + * @param chargeId ID of the of the authorized charge to refund from authorizeCharge() + * @param customerId Customer id in the payment system + * @return id of the charge + */ + fun refundCharge(chargeId: String, customerId: String): Either /** * @param customerId Customer id in the payment system diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/storage/Variants.kt b/prime-api/src/main/kotlin/org/ostelco/prime/storage/Variants.kt index a5fd68d89..06c2bbe0b 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/storage/Variants.kt +++ b/prime-api/src/main/kotlin/org/ostelco/prime/storage/Variants.kt @@ -1,6 +1,7 @@ package org.ostelco.prime.storage import arrow.core.Either +import org.ostelco.prime.core.ApiError import org.ostelco.prime.model.ApplicationToken import org.ostelco.prime.model.Bundle import org.ostelco.prime.model.Offer @@ -10,6 +11,7 @@ import org.ostelco.prime.model.PurchaseRecord import org.ostelco.prime.model.Segment import org.ostelco.prime.model.Subscriber import org.ostelco.prime.model.Subscription +import org.ostelco.prime.paymentprocessor.core.ProductInfo interface ClientDocumentStore { @@ -118,6 +120,11 @@ interface ClientGraphStore { * Get user who has referred this user. 
*/ fun getReferredBy(subscriberId: String): Either + + /** + * Temporary method to perform purchase as atomic transaction + */ + fun purchaseProduct(subscriberId: String, sku: String, sourceId: String?, saveCard: Boolean): Either } interface AdminGraphStore { From 92c2ae222b874f8b485f79496dc4ab01ca08cb4a Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Tue, 4 Sep 2018 13:26:54 +0200 Subject: [PATCH 12/78] Minor refactoring --- .../StripePaymentProcessor.kt | 83 +++++++++---------- 1 file changed, 37 insertions(+), 46 deletions(-) diff --git a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt index 5049da518..24e86813a 100644 --- a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt +++ b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt @@ -20,10 +20,10 @@ import org.ostelco.prime.paymentprocessor.core.SubscriptionInfo class StripePaymentProcessor : PaymentProcessor { - private val LOG by logger() + private val logger by logger() override fun getSavedSources(customerId: String): Either> = - either (NotFoundError("Failed to get sources for customer ${customerId}")) { + either(NotFoundError("Failed to get sources for customer $customerId")) { val sources = mutableListOf() val customer = Customer.retrieve(customerId) customer.sources.data.forEach { @@ -33,90 +33,81 @@ class StripePaymentProcessor : PaymentProcessor { } override fun createPaymentProfile(userEmail: String): Either = - either(ForbiddenError("Failed to create profile for user ${userEmail}")) { - val customerParams = HashMap() - customerParams.put("email", userEmail) + either(ForbiddenError("Failed to create profile for user $userEmail")) { + val customerParams = mapOf("email" to userEmail) ProfileInfo(Customer.create(customerParams).id) } override fun createPlan(productId: 
String, amount: Int, currency: String, interval: PaymentProcessor.Interval): Either = - either(ForbiddenError("Failed to create plan with product id ${productId} amount ${amount} currency ${currency} interval ${interval.value}")) { - val planParams = HashMap() - planParams["amount"] = amount - planParams["interval"] = interval.value - planParams["product"] = productId - planParams["currency"] = currency + either(ForbiddenError("Failed to create plan with product id $productId amount $amount currency $currency interval ${interval.value}")) { + val planParams = mapOf( + "amount" to amount, + "interval" to interval.value, + "product" to productId, + "currency" to currency) PlanInfo(Plan.create(planParams).id) } override fun removePlan(planId: String): Either = - either(NotFoundError("Failed to delete plan ${planId}")) { + either(NotFoundError("Failed to delete plan $planId")) { val plan = Plan.retrieve(planId) PlanInfo(plan.delete().id) } override fun createProduct(sku: String): Either = - either(ForbiddenError("Failed to create product with sku ${sku}")) { - val productParams = HashMap() - productParams["name"] = sku - productParams["type"] = "service" + either(ForbiddenError("Failed to create product with sku $sku")) { + val productParams = mapOf( + "name" to sku, + "type" to "service") ProductInfo(Product.create(productParams).id) } override fun removeProduct(productId: String): Either = - either(NotFoundError("Failed to delete product ${productId}")) { + either(NotFoundError("Failed to delete product $productId")) { val product = Product.retrieve(productId) ProductInfo(product.delete().id) } override fun addSource(customerId: String, sourceId: String): Either = - either(ForbiddenError("Failed to add source ${sourceId} to customer ${customerId}")) { + either(ForbiddenError("Failed to add source $sourceId to customer $customerId")) { val customer = Customer.retrieve(customerId) - val params = HashMap() - params["source"] = sourceId + val params = mapOf("source" to 
sourceId) SourceInfo(customer.sources.create(params).id) } override fun setDefaultSource(customerId: String, sourceId: String): Either = - either(ForbiddenError("Failed to set default source ${sourceId} for customer ${customerId}")) { + either(ForbiddenError("Failed to set default source $sourceId for customer $customerId")) { val customer = Customer.retrieve(customerId) - val updateParams = HashMap() - updateParams.put("default_source", sourceId) + val updateParams = mapOf("default_source" to sourceId) val customerUpdated = customer.update(updateParams) SourceInfo(customerUpdated.defaultSource) } override fun getDefaultSource(customerId: String): Either = - either(NotFoundError( "Failed to get default source for customer ${customerId}")) { + either(NotFoundError("Failed to get default source for customer $customerId")) { SourceInfo(Customer.retrieve(customerId).defaultSource) } override fun deletePaymentProfile(customerId: String): Either = - either(NotFoundError("Failed to delete customer ${customerId}")) { + either(NotFoundError("Failed to delete customer $customerId")) { val customer = Customer.retrieve(customerId) ProfileInfo(customer.delete().id) } override fun subscribeToPlan(planId: String, customerId: String): Either = - either(ForbiddenError("Failed to subscribe customer ${customerId} to plan ${planId}")) { - val item = HashMap() - item["plan"] = planId - - val items = HashMap() - items["0"] = item - - val params = HashMap() - params["customer"] = customerId - params["items"] = items + either(ForbiddenError("Failed to subscribe customer $customerId to plan $planId")) { + val item = mapOf("plan" to planId) + val params = mapOf( + "customer" to customerId, + "items" to mapOf("0" to item)) SubscriptionInfo(Subscription.create(params).id) } override fun cancelSubscription(subscriptionId: String, atIntervalEnd: Boolean): Either = - either(ForbiddenError("Failed to unsubscribe subscription Id : ${subscriptionId} atIntervalEnd ${atIntervalEnd}")) { + 
either(ForbiddenError("Failed to unsubscribe subscription Id : $subscriptionId atIntervalEnd $atIntervalEnd")) { val subscription = Subscription.retrieve(subscriptionId) - val subscriptionParams = HashMap() - subscriptionParams["at_period_end"] = atIntervalEnd + val subscriptionParams = mapOf("at_period_end" to atIntervalEnd) SubscriptionInfo(subscription.cancel(subscriptionParams).id) } @@ -124,11 +115,11 @@ class StripePaymentProcessor : PaymentProcessor { override fun authorizeCharge(customerId: String, sourceId: String?, amount: Int, currency: String): Either { val errorMessage = "Failed to authorize the charge for customerId $customerId sourceId $sourceId amount $amount currency $currency" return either(ForbiddenError(errorMessage)) { - val chargeParams = HashMap() - chargeParams["amount"] = amount - chargeParams["currency"] = currency - chargeParams["customer"] = customerId - chargeParams["capture"] = false + val chargeParams = mutableMapOf( + "amount" to amount, + "currency" to currency, + "customer" to customerId, + "capture" to false) if (sourceId != null) { chargeParams["source"] = sourceId } @@ -159,7 +150,7 @@ class StripePaymentProcessor : PaymentProcessor { charge.capture() Either.right(charge.id) } catch (e: Exception) { - LOG.warn(errorMessage, e) + logger.warn(errorMessage, e) Either.left(BadGatewayError(errorMessage)) } } @@ -170,7 +161,7 @@ class StripePaymentProcessor : PaymentProcessor { } override fun removeSource(customerId: String, sourceId: String): Either = - either(ForbiddenError("Failed to remove source ${sourceId} from customer ${customerId}")) { + either(ForbiddenError("Failed to remove source $sourceId from customer $customerId")) { Customer.retrieve(customerId).sources.retrieve(sourceId).delete().id } @@ -178,7 +169,7 @@ class StripePaymentProcessor : PaymentProcessor { return try { Either.right(action()) } catch (e: Exception) { - LOG.warn(apiError.description, e) + logger.warn(apiError.description, e) Either.left(apiError) } } From 
6f8bc9fe176f401b1569db830d26db6f614c0586 Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Tue, 4 Sep 2018 14:08:20 +0200 Subject: [PATCH 13/78] Added refund charge to StripePaymentProcessor --- .../ostelco/prime/storage/graph/Neo4jStore.kt | 2 +- .../StripePaymentProcessorTest.kt | 16 ++++++++++++++++ .../paymentprocessor/StripePaymentProcessor.kt | 12 +++++++++--- .../prime/paymentprocessor/PaymentProcessor.kt | 3 +-- 4 files changed, 27 insertions(+), 6 deletions(-) diff --git a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt index ea71d45d0..95ab64649 100644 --- a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt +++ b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt @@ -395,7 +395,7 @@ object Neo4jStoreSingleton : GraphStore { // Create purchase record createPurchaseRecordRelation(subscriberId, purchaseRecord, transaction) .mapLeft { storeError -> - paymentProcessor.refundCharge(chargeId, profileInfo.id) + paymentProcessor.refundCharge(chargeId) logger.error("failed to save purchase record, for customerId ${profileInfo.id}, chargeId $chargeId, payment will be unclaimed in Stripe") BadGatewayError(storeError.message) } diff --git a/payment-processor/src/integration-tests/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessorTest.kt b/payment-processor/src/integration-tests/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessorTest.kt index 074efa20b..cea785c71 100644 --- a/payment-processor/src/integration-tests/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessorTest.kt +++ b/payment-processor/src/integration-tests/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessorTest.kt @@ -90,6 +90,22 @@ class StripePaymentProcessorTest { assertEquals(true, resultRemoveDefault.isRight()) } + @Test + fun createAuthorizeChargeAndRefund() { + val resultAddSource = 
paymentProcessor.addSource(stripeCustomerId, createPaymentSourceId()) + assertEquals(true, resultAddSource.isRight()) + + val resultAuthorizeCharge = paymentProcessor.authorizeCharge(stripeCustomerId, resultAddSource.fold({ "" }, { it.id }), 1000, "nok") + assertEquals(true, resultAuthorizeCharge.isRight()) + + val resultRefundCharge = paymentProcessor.refundCharge(resultAuthorizeCharge.fold({ "" }, { it } )) + assertEquals(true, resultRefundCharge.isRight()) + assertEquals(resultAuthorizeCharge.fold({ "" }, { it } ), resultRefundCharge.fold({ "" }, { it } )) + + val resultRemoveSource = paymentProcessor.removeSource(stripeCustomerId, resultAddSource.fold({ "" }, { it.id })) + assertEquals(true, resultRemoveSource.isRight()) + } + @Test fun createAndRemoveProduct() { val resultCreateProduct = paymentProcessor.createProduct("TestSku") diff --git a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt index 24e86813a..76837bb6f 100644 --- a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt +++ b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt @@ -17,6 +17,10 @@ import org.ostelco.prime.paymentprocessor.core.ProductInfo import org.ostelco.prime.paymentprocessor.core.ProfileInfo import org.ostelco.prime.paymentprocessor.core.SourceInfo import org.ostelco.prime.paymentprocessor.core.SubscriptionInfo +import com.stripe.model.Refund +import java.util.HashMap + + class StripePaymentProcessor : PaymentProcessor { @@ -156,9 +160,11 @@ class StripePaymentProcessor : PaymentProcessor { } } - override fun refundCharge(chargeId: String, customerId: String): Either { - TODO("payment") - } + override fun refundCharge(chargeId: String): Either = + either(NotFoundError("Failed to refund charge $chargeId")) { + val refundParams = mapOf("charge" to chargeId) 
+ Refund.create(refundParams).charge + } override fun removeSource(customerId: String, sourceId: String): Either = either(ForbiddenError("Failed to remove source $sourceId from customer $customerId")) { diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt index 74af17b8b..4948d8ae2 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt +++ b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt @@ -114,10 +114,9 @@ interface PaymentProcessor { /** * @param chargeId ID of the of the authorized charge to refund from authorizeCharge() - * @param customerId Customer id in the payment system * @return id of the charge */ - fun refundCharge(chargeId: String, customerId: String): Either + fun refundCharge(chargeId: String): Either /** * @param customerId Customer id in the payment system From 235c620a7f27599edd9c952358a945b33a810eef Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Tue, 4 Sep 2018 15:11:00 +0200 Subject: [PATCH 14/78] Fix codacy complains --- .../main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java b/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java index ea516025f..3b863e8dd 100644 --- a/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java +++ b/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java @@ -24,7 +24,7 @@ import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; -public class OcsgwMetrics { +class OcsgwMetrics { private static final Logger LOG = LoggerFactory.getLogger(OcsgwMetrics.class); @@ -44,7 +44,7 @@ public class OcsgwMetrics { private int lastActiveSessions = 0; - public OcsgwMetrics(String metricsServerHostname, 
ServiceAccountJwtAccessCredentials credentials) { + OcsgwMetrics(String metricsServerHostname, ServiceAccountJwtAccessCredentials credentials) { try { final NettyChannelBuilder nettyChannelBuilder = NettyChannelBuilder From 67674172973410e89e44d8d8e07f2a2d90412b65 Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Wed, 5 Sep 2018 16:33:20 +0200 Subject: [PATCH 15/78] Added information to log messages --- .../DataConsumptionInfoPublisher.kt | 2 +- .../client/api/store/SubscriberDAOImpl.kt | 24 +++++++++---------- .../ostelco/prime/events/EventProcessor.kt | 4 ++-- .../org/ostelco/prime/ocs/EventHandlerImpl.kt | 19 ++++++++------- 4 files changed, 26 insertions(+), 23 deletions(-) diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt index 674750198..89905ae6d 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt @@ -80,7 +80,7 @@ object DataConsumptionInfoPublisher : Managed { override fun onSuccess(messageId: String) { // Once published, returns server-assigned message ids (unique within the topic) - logger.debug(messageId) + logger.debug("Published message $messageId") } }) } diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt index 1d9d2bca0..1aae9a635 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt @@ -55,7 +55,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu BadRequestError("Incomplete profile description. 
${it.message}") } } catch (e: Exception) { - logger.error("Failed to fetch profile", e) + logger.error("Failed to fetch profile for subscriberId ${subscriberId}", e) Either.left(NotFoundError("Failed to fetch profile")) } } @@ -73,7 +73,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu getProfile(subscriberId) } } catch (e: Exception) { - logger.error("Failed to create profile", e) + logger.error("Failed to create profile for subscriberId ${subscriberId}", e) Either.left(ForbiddenError("Failed to create profile")) } } @@ -87,7 +87,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu try { storage.addNotificationToken(msisdn, applicationToken) } catch (e: Exception) { - logger.error("Failed to store ApplicationToken", e) + logger.error("Failed to store ApplicationToken for msisdn ${msisdn}", e) return Either.left(InsuffientStorageError("Failed to store ApplicationToken")) } return getNotificationToken(msisdn, applicationToken.applicationID) @@ -99,7 +99,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu ?.let { Either.right(it) } ?: return Either.left(NotFoundError("Failed to get ApplicationToken")) } catch (e: Exception) { - logger.error("Failed to get ApplicationToken", e) + logger.error("Failed to get ApplicationToken for msisdn ${msisdn}", e) return Either.left(NotFoundError("Failed to get ApplicationToken")) } } @@ -111,7 +111,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu try { storage.updateSubscriber(profile) } catch (e: Exception) { - logger.error("Failed to update profile", e) + logger.error("Failed to update profile for subscriberId ${subscriberId}", e) return Either.left(NotFoundError("Failed to update profile")) } @@ -128,7 +128,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu } .mapLeft { NotFoundError(it.message) } } catch (e: Exception) { - logger.error("Failed to get 
balance", e) + logger.error("Failed to get balance for subscriber ${subscriberId}", e) return Either.left(NotFoundError("Failed to get balance")) } } @@ -139,7 +139,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu NotFoundError("Failed to get subscriptions. ${it.message}") } } catch (e: Exception) { - logger.error("Failed to get subscriptions", e) + logger.error("Failed to get subscriptions for subscriberId $subscriberId", e) return Either.left(NotFoundError("Failed to get subscriptions")) } } @@ -156,7 +156,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu { NotFoundError("Failed to get purchase history. ${it.message}") }, { it.toList() }) } catch (e: Exception) { - logger.error("Failed to get purchase history", e) + logger.error("Failed to get purchase history for subscriberId ${subscriberId}", e) Either.left(NotFoundError("Failed to get purchase history")) } } @@ -167,7 +167,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu NotFoundError("Did not find msisdn for this subscription. ${it.message}") } } catch (e: Exception) { - logger.error("Did not find msisdn for this subscription", e) + logger.error("Did not find msisdn for subscriberId $subscriberId", e) Either.left(NotFoundError("Did not find subscription")) } } @@ -178,7 +178,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu { NotFoundError(it.message) }, { products -> products.values }) } catch (e: Exception) { - logger.error("Failed to get Products", e) + logger.error("Failed to get Products for subscriberId ${subscriberId}", e) Either.left(NotFoundError("Failed to get Products")) } @@ -313,7 +313,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu { NotFoundError("Failed to get referral list. 
${it.message}") }, { list -> list.map { Person(it) } }) } catch (e: Exception) { - logger.error("Failed to get referral list", e) + logger.error("Failed to get referral list for subscriberId ${subscriberId}", e) Either.left(NotFoundError("Failed to get referral list")) } } @@ -324,7 +324,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu { NotFoundError("Failed to get referred-by. ${it.message}") }, { Person(name = it) }) } catch (e: Exception) { - logger.error("Failed to get referred-by", e) + logger.error("Failed to get referred-by for subscriberId ${subscriberId}", e) Either.left(NotFoundError("Failed to get referred-by")) } } diff --git a/ocs/src/main/kotlin/org/ostelco/prime/events/EventProcessor.kt b/ocs/src/main/kotlin/org/ostelco/prime/events/EventProcessor.kt index 59f8fc6a0..13131a944 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/events/EventProcessor.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/events/EventProcessor.kt @@ -31,8 +31,8 @@ class EventProcessor( || event.messageType == RELEASE_RESERVED_BUCKET || event.messageType == TOPUP_DATA_BUNDLE_BALANCE || event.messageType == REMOVE_MSISDN_TO_BUNDLE_MAPPING) { - logger.info("Updating data bundle balance for {} : {} to {} bytes", - event.msisdn, event.bundleId, event.bundleBytes) + logger.info("Updating data bundle balance for bundleId : {} to {} bytes", + event.bundleId, event.bundleBytes) val bundleId = event.bundleId if (bundleId != null) { storage.updateBundle(Bundle(bundleId, event.bundleBytes)) diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt index 2402c9727..968ac8a4a 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt @@ -50,14 +50,17 @@ internal class EventHandlerImpl(private val ocsService: OcsService) : EventHandl } private fun logEventProcessing(msg: String, event: 
OcsEvent) { - logger.info("{}", msg) - logger.info("MSISDN: {}", event.msisdn) - logger.info("requested bytes: {}", event.requestedBucketBytes) - logger.info("reserved bytes: {}", event.reservedBucketBytes) - logger.info("used bytes: {}", event.usedBucketBytes) - logger.info("bundle bytes: {}", event.bundleBytes) - logger.info("Reporting reason: {}", event.reportingReason) - logger.info("request id: {} ",event.ocsgwRequestId) + val logString = """ + ${msg} + Msisdn: ${event.msisdn} + Requested bytes: ${event.requestedBucketBytes} + Used bytes: ${event.usedBucketBytes} + Bundle bytes: ${event.bundleBytes} + Reporting reason: ${event.reportingReason} + Request id: ${event.ocsgwRequestId} + """.trimIndent() + + logger.info(logString) } private fun handleCreditControlRequest(event: OcsEvent) { From 84577558e88e4fe73d6be36a77ec6c470e43eaa0 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Thu, 6 Sep 2018 10:02:11 +0200 Subject: [PATCH 16/78] Add pubsub publisher for purchase records --- analytics-module/build.gradle | 1 + .../prime/analytics/AnalyticsModule.kt | 8 +- .../prime/analytics/AnalyticsServiceImpl.kt | 6 ++ .../DataConsumptionInfoPublisher.kt | 2 +- .../publishers/PurchaseInfoPublisher.kt | 94 +++++++++++++++++++ .../client/api/store/SubscriberDAOImpl.kt | 5 + .../org/ostelco/prime/model/Entities.kt | 13 +++ .../ostelco/prime/storage/graph/Neo4jStore.kt | 2 + .../prime/analytics/AnalyticsService.kt | 3 + prime/config/config.yaml | 3 +- prime/config/test.yaml | 3 +- prime/script/wait.sh | 1 + .../integration-tests/resources/config.yaml | 3 +- 13 files changed, 138 insertions(+), 6 deletions(-) create mode 100644 analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt diff --git a/analytics-module/build.gradle b/analytics-module/build.gradle index adcf08f5a..a14bb9719 100644 --- a/analytics-module/build.gradle +++ b/analytics-module/build.gradle @@ -9,6 +9,7 @@ dependencies { implementation 
"org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion" implementation "io.dropwizard:dropwizard-core:$dropwizardVersion" implementation "com.google.cloud:google-cloud-pubsub:$googleCloudVersion" + implementation 'com.google.code.gson:gson:2.8.5' testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" testImplementation 'org.mockito:mockito-core:2.18.3' diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsModule.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsModule.kt index 98b6a8ca8..49621a689 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsModule.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsModule.kt @@ -35,8 +35,12 @@ class AnalyticsConfig { lateinit var projectId: String @NotEmpty - @JsonProperty("topicId") - lateinit var topicId: String + @JsonProperty("dataTrafficTopicId") + lateinit var dataTrafficTopicId: String + + @NotEmpty + @JsonProperty("purchaseInfoTopicId") + lateinit var purchaseInfoTopicId: String } object ConfigRegistry { diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsServiceImpl.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsServiceImpl.kt index 99b9c34c5..7dafff765 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsServiceImpl.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsServiceImpl.kt @@ -2,7 +2,9 @@ package org.ostelco.prime.analytics import org.ostelco.prime.analytics.metrics.CustomMetricsRegistry import org.ostelco.prime.analytics.publishers.DataConsumptionInfoPublisher +import org.ostelco.prime.analytics.publishers.PurchaseInfoPublisher import org.ostelco.prime.logger +import org.ostelco.prime.model.PurchaseRecord class AnalyticsServiceImpl : AnalyticsService { @@ -16,4 +18,8 @@ class AnalyticsServiceImpl : AnalyticsService { override fun reportMetric(primeMetric: 
PrimeMetric, value: Long) { CustomMetricsRegistry.updateMetricValue(primeMetric, value) } + + override fun reportPurchaseInfo(purchaseRecord: PurchaseRecord, subscriberId: String, status: String) { + PurchaseInfoPublisher.publish(purchaseRecord, subscriberId, status) + } } \ No newline at end of file diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt index 674750198..e5b651e64 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt @@ -30,7 +30,7 @@ object DataConsumptionInfoPublisher : Managed { @Throws(IOException::class) override fun start() { - val topicName = ProjectTopicName.of(config.projectId, config.topicId) + val topicName = ProjectTopicName.of(config.projectId, config.dataTrafficTopicId) // Create a publisher instance with default settings bound to the topic publisher = Publisher.newBuilder(topicName).build() diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt new file mode 100644 index 000000000..14e522b09 --- /dev/null +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt @@ -0,0 +1,94 @@ +package org.ostelco.prime.analytics.publishers + +import com.google.api.core.ApiFutureCallback +import com.google.api.core.ApiFutures +import com.google.api.gax.rpc.ApiException +import com.google.cloud.pubsub.v1.Publisher +import com.google.gson.* +import com.google.gson.reflect.TypeToken +import com.google.protobuf.ByteString +import com.google.pubsub.v1.ProjectTopicName +import com.google.pubsub.v1.PubsubMessage +import 
io.dropwizard.lifecycle.Managed +import org.ostelco.prime.analytics.ConfigRegistry +import org.ostelco.prime.logger +import org.ostelco.prime.model.PurchaseRecord +import org.ostelco.prime.model.PurchaseRecordInfo +import java.io.IOException + + +/** + * This class publishes the purchase information events to the Google Cloud Pub/Sub. + */ +object PurchaseInfoPublisher : Managed { + + private val logger by logger() + + private var gson: Gson = createGson() + + private lateinit var publisher: Publisher + + @Throws(IOException::class) + override fun start() { + + val topicName = ProjectTopicName.of(ConfigRegistry.config.projectId, ConfigRegistry.config.purchaseInfoTopicId) + + // Create a publisher instance with default settings bound to the topic + publisher = Publisher.newBuilder(topicName).build() + } + + @Throws(Exception::class) + override fun stop() { + // When finished with the publisher, shutdown to free up resources. + publisher.shutdown() + } + + private fun createGson(): Gson { + val builder = GsonBuilder() + val mapType = object : TypeToken>() {}.type + val serializer = JsonSerializer> { src, _, _ -> + val array = JsonArray() + src.forEach { k, v -> + val property = JsonObject() + property.addProperty("key", k) + property.addProperty("value", v) + array.add(property) + } + array + } + builder.registerTypeAdapter(mapType, serializer) + return builder.create() + } + + private fun convertToJson(purchaseRecordInfo: PurchaseRecordInfo): ByteString = + ByteString.copyFromUtf8(gson.toJson(purchaseRecordInfo)) + + + fun publish(purchaseRecord: PurchaseRecord, subscriberId: String, status: String) { + + val pubsubMessage = PubsubMessage.newBuilder() + .setData(convertToJson(PurchaseRecordInfo(purchaseRecord, subscriberId, status))) + .build() + + //schedule a message to be published, messages are automatically batched + val future = publisher.publish(pubsubMessage) + + // add an asynchronous callback to handle success / failure + ApiFutures.addCallback(future, 
object : ApiFutureCallback { + + override fun onFailure(throwable: Throwable) { + if (throwable is ApiException) { + // details on the API exception + logger.warn("Status code: {}", throwable.statusCode.code) + logger.warn("Retrying: {}", throwable.isRetryable) + } + logger.warn("Error publishing purchase record for msisdn: {}", purchaseRecord.msisdn) + } + + override fun onSuccess(messageId: String) { + // Once published, returns server-assigned message ids (unique within the topic) + logger.debug(messageId) + } + }) + } +} diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt index 1d9d2bca0..2e5616ed3 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt @@ -273,6 +273,11 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu } // Notify OCS .flatMap { + //TODO: While aborting transactions, send a record with "reverted" status + analyticsReporter.reportPurchaseInfo( + purchaseRecord = purchaseRecord, + subscriberId = subscriberId, + status = "success") //TODO: Handle errors (when it becomes available) ocsSubscriberService.topup(subscriberId, sku) Either.right(Tuple4(profileInfo, savedSourceId, chargeId, product)) diff --git a/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt b/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt index fd7659c2b..fabefa108 100644 --- a/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt +++ b/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt @@ -84,6 +84,19 @@ data class PurchaseRecord( val product: Product, val timestamp: Long) : HasId +data class PurchaseRecordInfo(override val id: String, + val subscriberId: String, + val product: Product, + val timestamp: Long, + val status: String) : HasId { + 
constructor(purchaseRecord: PurchaseRecord, subscriberId: String, status: String = "success") : this( + purchaseRecord.id, + subscriberId, + purchaseRecord.product, + purchaseRecord.timestamp, + status) +} + data class PseudonymEntity( val msisdn: String, val pseudonym: String, diff --git a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt index b2f109993..ac89bc583 100644 --- a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt +++ b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt @@ -3,6 +3,7 @@ package org.ostelco.prime.storage.graph import arrow.core.Either import arrow.core.flatMap import org.neo4j.driver.v1.Transaction +import org.ostelco.prime.analytics.AnalyticsService import org.ostelco.prime.logger import org.ostelco.prime.model.Bundle import org.ostelco.prime.model.Offer @@ -49,6 +50,7 @@ object Neo4jStoreSingleton : GraphStore { private val ocs: OcsAdminService by lazy { getResource() } private val logger by logger() + private val analyticsReporter by lazy { getResource() } // // Entity diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsService.kt b/prime-api/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsService.kt index 466f9f0be..c7088ea54 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsService.kt +++ b/prime-api/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsService.kt @@ -2,10 +2,13 @@ package org.ostelco.prime.analytics import org.ostelco.prime.analytics.MetricType.COUNTER import org.ostelco.prime.analytics.MetricType.GAUGE +import org.ostelco.prime.model.PurchaseRecord +import org.ostelco.prime.model.Subscriber interface AnalyticsService { fun reportTrafficInfo(msisdn: String, usedBytes: Long, bundleBytes: Long) fun reportMetric(primeMetric: PrimeMetric, value: Long) + fun reportPurchaseInfo(purchaseRecord: PurchaseRecord, subscriberId: 
String, status: String) } enum class PrimeMetric(val metricType: MetricType) { diff --git a/prime/config/config.yaml b/prime/config/config.yaml index 1f4bf8d61..da4dec32b 100644 --- a/prime/config/config.yaml +++ b/prime/config/config.yaml @@ -11,7 +11,8 @@ modules: - type: analytics config: projectId: pantel-2decb - topicId: data-traffic + dataTrafficTopicId: data-traffic + purchaseInfoTopicId: purchase-info - type: ocs config: lowBalanceThreshold: 100000000 diff --git a/prime/config/test.yaml b/prime/config/test.yaml index 11def9a99..7f1850185 100644 --- a/prime/config/test.yaml +++ b/prime/config/test.yaml @@ -13,7 +13,8 @@ modules: - type: analytics config: projectId: pantel-2decb - topicId: data-traffic + dataTrafficTopicId: data-traffic + purchaseInfoTopicId: purchase-info - type: ocs config: lowBalanceThreshold: 0 diff --git a/prime/script/wait.sh b/prime/script/wait.sh index f5450a2cb..3ed1947f0 100755 --- a/prime/script/wait.sh +++ b/prime/script/wait.sh @@ -38,6 +38,7 @@ echo "Creating topics and subscriptions...." 
curl -X PUT pubsub-emulator:8085/v1/projects/pantel-2decb/topics/data-traffic curl -X PUT pubsub-emulator:8085/v1/projects/pantel-2decb/topics/pseudo-traffic curl -X PUT -H "Content-Type: application/json" -d '{"topic":"projects/pantel-2decb/topics/data-traffic","ackDeadlineSeconds":10}' pubsub-emulator:8085/v1/projects/pantel-2decb/subscriptions/test-pseudo +curl -X PUT pubsub-emulator:8085/v1/projects/pantel-2decb/topics/purchase-info echo "Done creating topics and subscriptions" diff --git a/prime/src/integration-tests/resources/config.yaml b/prime/src/integration-tests/resources/config.yaml index a8475ea4c..6e4faf8eb 100644 --- a/prime/src/integration-tests/resources/config.yaml +++ b/prime/src/integration-tests/resources/config.yaml @@ -11,7 +11,8 @@ modules: - type: analytics config: projectId: pantel-2decb - topicId: data-traffic + dataTrafficTopicId: data-traffic + purchaseInfoTopicId: purchase-info - type: ocs config: lowBalanceThreshold: 0 From 9d271a73f3d43815fbd147cd4a38cb2f46cfe523 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Thu, 6 Sep 2018 10:47:36 +0200 Subject: [PATCH 17/78] Pseudonymize subscriberId pushed to pubsub Currently we are using the msisdn pseudonymizer. 
--- .../org/ostelco/prime/analytics/AnalyticsModule.kt | 2 ++ .../analytics/publishers/PurchaseInfoPublisher.kt | 10 +++++++++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsModule.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsModule.kt index 49621a689..1f0c7d0b8 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsModule.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsModule.kt @@ -6,6 +6,7 @@ import io.dropwizard.setup.Environment import org.hibernate.validator.constraints.NotEmpty import org.ostelco.prime.analytics.metrics.CustomMetricsRegistry import org.ostelco.prime.analytics.publishers.DataConsumptionInfoPublisher +import org.ostelco.prime.analytics.publishers.PurchaseInfoPublisher import org.ostelco.prime.module.PrimeModule @JsonTypeName("analytics") @@ -26,6 +27,7 @@ class AnalyticsModule : PrimeModule { // dropwizard starts Analytics events publisher env.lifecycle().manage(DataConsumptionInfoPublisher) + env.lifecycle().manage(PurchaseInfoPublisher) } } diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt index 14e522b09..4c9c63e68 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt @@ -14,7 +14,10 @@ import org.ostelco.prime.analytics.ConfigRegistry import org.ostelco.prime.logger import org.ostelco.prime.model.PurchaseRecord import org.ostelco.prime.model.PurchaseRecordInfo +import org.ostelco.prime.module.getResource +import org.ostelco.prime.pseudonymizer.PseudonymizerService import java.io.IOException +import java.net.URLEncoder /** @@ -24,6 +27,8 @@ object 
PurchaseInfoPublisher : Managed { private val logger by logger() + private val pseudonymizerService by lazy { getResource() } + private var gson: Gson = createGson() private lateinit var publisher: Publisher @@ -66,8 +71,11 @@ object PurchaseInfoPublisher : Managed { fun publish(purchaseRecord: PurchaseRecord, subscriberId: String, status: String) { + val encodedSubscriberId = URLEncoder.encode(subscriberId,"UTF-8") + val pseudonym = pseudonymizerService.getPseudonymEntityFor(encodedSubscriberId, purchaseRecord.timestamp).pseudonym + val pubsubMessage = PubsubMessage.newBuilder() - .setData(convertToJson(PurchaseRecordInfo(purchaseRecord, subscriberId, status))) + .setData(convertToJson(PurchaseRecordInfo(purchaseRecord, pseudonym, status))) .build() //schedule a message to be published, messages are automatically batched From 21359033f0468983145d82f09797d900dbe1d309 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Thu, 6 Sep 2018 11:09:15 +0200 Subject: [PATCH 18/78] Remove default value for status --- model/src/main/kotlin/org/ostelco/prime/model/Entities.kt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt b/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt index fabefa108..391e81366 100644 --- a/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt +++ b/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt @@ -89,7 +89,7 @@ data class PurchaseRecordInfo(override val id: String, val product: Product, val timestamp: Long, val status: String) : HasId { - constructor(purchaseRecord: PurchaseRecord, subscriberId: String, status: String = "success") : this( + constructor(purchaseRecord: PurchaseRecord, subscriberId: String, status: String) : this( purchaseRecord.id, subscriberId, purchaseRecord.product, From 90db4e53980c35173c4057862f94461c3d4d723a Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Thu, 6 Sep 2018 11:12:09 +0200 Subject: [PATCH 19/78] Add "msisdn" 
prefix to all Pseudonym vriable and method names. Preparing for SubscriberId Pseudonym entity --- .../DataConsumptionInfoPublisher.kt | 2 +- .../publishers/PurchaseInfoPublisher.kt | 2 +- .../prime/client/api/store/SubscriberDAO.kt | 4 +- .../client/api/store/SubscriberDAOImpl.kt | 4 +- .../api/resources/SubscriptionResourceTest.kt | 8 +-- .../org/ostelco/prime/model/Entities.kt | 14 +++-- .../pseudonymizer/PseudonymizerService.kt | 8 +-- .../kotlin/org/ostelco/pseudonym/Model.kt | 4 +- .../pseudonym/resources/PseudonymResource.kt | 10 ++-- .../pseudonym/service/PseudonymExport.kt | 4 +- .../service/PseudonymizerServiceSingleton.kt | 56 +++++++++---------- .../pseudonym/PseudonymResourceTest.kt | 54 +++++++++--------- 12 files changed, 89 insertions(+), 81 deletions(-) diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt index e5b651e64..2709813a7 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt @@ -49,7 +49,7 @@ object DataConsumptionInfoPublisher : Managed { } val now = Instant.now().toEpochMilli() - val pseudonym = pseudonymizerService.getPseudonymEntityFor(msisdn, now).pseudonym + val pseudonym = pseudonymizerService.getMsisdnPseudonymEntityFor(msisdn, now).pseudonym val data = DataTrafficInfo.newBuilder() .setMsisdn(pseudonym) diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt index 4c9c63e68..b4fec14d2 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt +++ 
b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt @@ -72,7 +72,7 @@ object PurchaseInfoPublisher : Managed { fun publish(purchaseRecord: PurchaseRecord, subscriberId: String, status: String) { val encodedSubscriberId = URLEncoder.encode(subscriberId,"UTF-8") - val pseudonym = pseudonymizerService.getPseudonymEntityFor(encodedSubscriberId, purchaseRecord.timestamp).pseudonym + val pseudonym = pseudonymizerService.getMsisdnPseudonymEntityFor(encodedSubscriberId, purchaseRecord.timestamp).pseudonym val pubsubMessage = PubsubMessage.newBuilder() .setData(convertToJson(PurchaseRecordInfo(purchaseRecord, pseudonym, status))) diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAO.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAO.kt index 45bf42fb5..db74ff4a3 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAO.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAO.kt @@ -5,7 +5,7 @@ import org.ostelco.prime.client.api.model.Consent import org.ostelco.prime.client.api.model.Person import org.ostelco.prime.client.api.model.SubscriptionStatus import org.ostelco.prime.core.ApiError -import org.ostelco.prime.model.ActivePseudonyms +import org.ostelco.prime.model.ActiveMsisdnPseudonyms import org.ostelco.prime.model.ApplicationToken import org.ostelco.prime.model.Product import org.ostelco.prime.model.PurchaseRecord @@ -91,5 +91,5 @@ interface SubscriberDAO { @Deprecated(message = "use purchaseProduct") fun purchaseProductWithoutPayment(subscriberId: String, sku: String): Either - fun getActivePseudonymOfMsisdnForSubscriber(subscriberId: String): Either + fun getActivePseudonymOfMsisdnForSubscriber(subscriberId: String): Either } diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt 
b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt index 2e5616ed3..30818f29a 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt @@ -17,7 +17,7 @@ import org.ostelco.prime.core.ForbiddenError import org.ostelco.prime.core.InsuffientStorageError import org.ostelco.prime.core.NotFoundError import org.ostelco.prime.logger -import org.ostelco.prime.model.ActivePseudonyms +import org.ostelco.prime.model.ActiveMsisdnPseudonyms import org.ostelco.prime.model.ApplicationToken import org.ostelco.prime.model.Product import org.ostelco.prime.model.PurchaseRecord @@ -144,7 +144,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu } } - override fun getActivePseudonymOfMsisdnForSubscriber(subscriberId: String): Either { + override fun getActivePseudonymOfMsisdnForSubscriber(subscriberId: String): Either { return storage.getMsisdn(subscriberId) .mapLeft { NotFoundError("Failed to msisdn for user. 
${it.message}") } .map { msisdn -> pseudonymizer.getActivePseudonymsForMsisdn(msisdn) } diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResourceTest.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResourceTest.kt index ad6926288..b3633e88d 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResourceTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResourceTest.kt @@ -23,10 +23,10 @@ import org.ostelco.prime.client.api.auth.OAuthAuthenticator import org.ostelco.prime.client.api.model.SubscriptionStatus import org.ostelco.prime.client.api.store.SubscriberDAO import org.ostelco.prime.client.api.util.AccessToken -import org.ostelco.prime.model.ActivePseudonyms +import org.ostelco.prime.model.ActiveMsisdnPseudonyms import org.ostelco.prime.model.Price import org.ostelco.prime.model.Product -import org.ostelco.prime.model.PseudonymEntity +import org.ostelco.prime.model.MsisdnPseudonymEntity import org.ostelco.prime.model.PurchaseRecord import java.time.Instant import java.util.* @@ -79,8 +79,8 @@ class SubscriptionResourceTest { val arg = argumentCaptor() val msisdn = "4790300001" - val pseudonym = PseudonymEntity(msisdn, "random", 0, 1) - val activePseudonyms = ActivePseudonyms(pseudonym, pseudonym) + val pseudonym = MsisdnPseudonymEntity(msisdn, "random", 0, 1) + val activePseudonyms = ActiveMsisdnPseudonyms(pseudonym, pseudonym) `when`(DAO.getActivePseudonymOfMsisdnForSubscriber(arg.capture())) .thenReturn(Either.right(activePseudonyms)) diff --git a/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt b/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt index 391e81366..b0245341d 100644 --- a/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt +++ b/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt @@ -97,12 +97,18 @@ data class PurchaseRecordInfo(override val id: String, status) } 
-data class PseudonymEntity( +data class MsisdnPseudonymEntity( val msisdn: String, val pseudonym: String, val start: Long, val end: Long) -data class ActivePseudonyms( - val current: PseudonymEntity, - val next: PseudonymEntity) \ No newline at end of file +data class SubscriberIdPseudonymEntity( + val subscriberId: String, + val pseudonym: String, + val start: Long, + val end: Long) + +data class ActiveMsisdnPseudonyms( + val current: MsisdnPseudonymEntity, + val next: MsisdnPseudonymEntity) \ No newline at end of file diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/pseudonymizer/PseudonymizerService.kt b/prime-api/src/main/kotlin/org/ostelco/prime/pseudonymizer/PseudonymizerService.kt index f48b214c7..dc27669aa 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/pseudonymizer/PseudonymizerService.kt +++ b/prime-api/src/main/kotlin/org/ostelco/prime/pseudonymizer/PseudonymizerService.kt @@ -1,11 +1,11 @@ package org.ostelco.prime.pseudonymizer -import org.ostelco.prime.model.ActivePseudonyms -import org.ostelco.prime.model.PseudonymEntity +import org.ostelco.prime.model.ActiveMsisdnPseudonyms +import org.ostelco.prime.model.MsisdnPseudonymEntity interface PseudonymizerService { - fun getActivePseudonymsForMsisdn(msisdn: String): ActivePseudonyms + fun getActivePseudonymsForMsisdn(msisdn: String): ActiveMsisdnPseudonyms - fun getPseudonymEntityFor(msisdn: String, timestamp: Long): PseudonymEntity + fun getMsisdnPseudonymEntityFor(msisdn: String, timestamp: Long): MsisdnPseudonymEntity } \ No newline at end of file diff --git a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/Model.kt b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/Model.kt index 81d6334f1..e02687f3c 100644 --- a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/Model.kt +++ b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/Model.kt @@ -1,10 +1,12 @@ package org.ostelco.pseudonym -const val PseudonymEntityKind = "Pseudonym" +const val 
MsisdnPseudonymEntityKind = "Pseudonym" const val msisdnPropertyName = "msisdn" const val pseudonymPropertyName = "pseudonym" const val startPropertyName = "start" const val endPropertyName = "end" +const val SubscriberIdPseudonymEntityKind = "SubscriberPseudonym" +const val subscriberIdPropertyName = "subscriberId" const val ExportTaskKind = "ExportTask" const val exportIdPropertyName = "exportId" diff --git a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/resources/PseudonymResource.kt b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/resources/PseudonymResource.kt index 054bdabf2..d44857f32 100644 --- a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/resources/PseudonymResource.kt +++ b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/resources/PseudonymResource.kt @@ -37,7 +37,7 @@ class PseudonymResource { fun getPseudonym(@NotBlank @PathParam("msisdn") msisdn: String, @NotBlank @PathParam("timestamp") timestamp: String): Response { logger.info("GET pseudonym for Msisdn = $msisdn at timestamp = $timestamp") - val entity = PseudonymizerServiceSingleton.getPseudonymEntityFor(msisdn, timestamp.toLong()) + val entity = PseudonymizerServiceSingleton.getMsisdnPseudonymEntityFor(msisdn, timestamp.toLong()) return Response.ok(entity, MediaType.APPLICATION_JSON).build() } @@ -51,7 +51,7 @@ class PseudonymResource { fun getPseudonym(@NotBlank @PathParam("msisdn") msisdn: String): Response { val timestamp = Instant.now().toEpochMilli() logger.info("GET pseudonym for Msisdn = $msisdn at current time, timestamp = $timestamp") - val entity = PseudonymizerServiceSingleton.getPseudonymEntityFor(msisdn, timestamp) + val entity = PseudonymizerServiceSingleton.getMsisdnPseudonymEntityFor(msisdn, timestamp) return Response.ok(entity, MediaType.APPLICATION_JSON).build() } @@ -76,7 +76,7 @@ class PseudonymResource { @Path("/find/{pseudonym}") fun findPseudonym(@NotBlank @PathParam("pseudonym") pseudonym: String): Response { logger.info("Find details for 
pseudonym = $pseudonym") - return PseudonymizerServiceSingleton.findPseudonym(pseudonym = pseudonym) + return PseudonymizerServiceSingleton.findMsisdnPseudonym(pseudonym = pseudonym) ?.let { Response.ok(it, MediaType.APPLICATION_JSON).build() } ?: Response.status(Status.NOT_FOUND).build() } @@ -90,7 +90,7 @@ class PseudonymResource { @Path("/delete/{msisdn}") fun deleteAllPseudonyms(@NotBlank @PathParam("msisdn") msisdn: String): Response { logger.info("delete all pseudonyms for Msisdn = $msisdn") - val count = PseudonymizerServiceSingleton.deleteAllPseudonyms(msisdn = msisdn) + val count = PseudonymizerServiceSingleton.deleteAllMsisdnPseudonyms(msisdn = msisdn) // Return a Json object with number of records deleted. val countMap = mapOf("count" to count) logger.info("deleted $count records for Msisdn = $msisdn") @@ -106,7 +106,7 @@ class PseudonymResource { @Path("/export/{exportId}") fun exportPseudonyms(@NotBlank @PathParam("exportId") exportId: String): Response { logger.info("GET export all pseudonyms to the table $exportId") - PseudonymizerServiceSingleton.exportPseudonyms(exportId = exportId) + PseudonymizerServiceSingleton.exportMsisdnPseudonyms(exportId = exportId) return Response.ok("Started Exporting", MediaType.TEXT_PLAIN).build() } diff --git a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymExport.kt b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymExport.kt index e94c9872d..242c7d0dd 100644 --- a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymExport.kt +++ b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymExport.kt @@ -17,7 +17,7 @@ import com.google.cloud.datastore.StructuredQuery import com.google.common.cache.Cache import com.google.common.cache.CacheBuilder import org.ostelco.pseudonym.ExportTaskKind -import org.ostelco.pseudonym.PseudonymEntityKind +import org.ostelco.pseudonym.MsisdnPseudonymEntityKind import org.ostelco.pseudonym.errorPropertyName 
import org.ostelco.pseudonym.exportIdPropertyName import org.ostelco.pseudonym.msisdnPropertyName @@ -83,7 +83,7 @@ class PseudonymExport(private val exportId: String, private val bigquery: BigQue // Dump pseudonyms to BQ, one page at a time. Since all records in a // page are inserted at once, use a small page size val queryBuilder = Query.newEntityQueryBuilder() - .setKind(PseudonymEntityKind) + .setKind(MsisdnPseudonymEntityKind) .setOrderBy(StructuredQuery.OrderBy.asc(msisdnPropertyName)) .setLimit(pageSize) if (cursor != null) { diff --git a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt index 44509bb39..215a21569 100644 --- a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt +++ b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt @@ -15,12 +15,12 @@ import com.google.common.cache.Cache import com.google.common.cache.CacheBuilder import io.dropwizard.setup.Environment import org.ostelco.prime.logger -import org.ostelco.prime.model.ActivePseudonyms -import org.ostelco.prime.model.PseudonymEntity +import org.ostelco.prime.model.ActiveMsisdnPseudonyms +import org.ostelco.prime.model.MsisdnPseudonymEntity import org.ostelco.prime.pseudonymizer.PseudonymizerService import org.ostelco.pseudonym.ConfigRegistry import org.ostelco.pseudonym.ExportTaskKind -import org.ostelco.pseudonym.PseudonymEntityKind +import org.ostelco.pseudonym.MsisdnPseudonymEntityKind import org.ostelco.pseudonym.endPropertyName import org.ostelco.pseudonym.errorPropertyName import org.ostelco.pseudonym.exportIdPropertyName @@ -69,7 +69,7 @@ object PseudonymizerServiceSingleton : PseudonymizerService { private val executor = Executors.newFixedThreadPool(3) - val pseudonymCache: Cache = CacheBuilder.newBuilder() + val msisdnPseudonymCache: Cache = 
CacheBuilder.newBuilder() .maximumSize(5000) .build() @@ -85,41 +85,41 @@ object PseudonymizerServiceSingleton : PseudonymizerService { } } - override fun getActivePseudonymsForMsisdn(msisdn: String): ActivePseudonyms { + override fun getActivePseudonymsForMsisdn(msisdn: String): ActiveMsisdnPseudonyms { val currentTimestamp = Instant.now().toEpochMilli() val nextTimestamp = dateBounds.getNextPeriodStart(currentTimestamp) logger.info("GET pseudonym for Msisdn = $msisdn at timestamps = $currentTimestamp & $nextTimestamp") - val current = getPseudonymEntityFor(msisdn, currentTimestamp) - val next = getPseudonymEntityFor(msisdn, nextTimestamp) - return ActivePseudonyms(current, next) + val current = getMsisdnPseudonymEntityFor(msisdn, currentTimestamp) + val next = getMsisdnPseudonymEntityFor(msisdn, nextTimestamp) + return ActiveMsisdnPseudonyms(current, next) } - override fun getPseudonymEntityFor(msisdn: String, timestamp: Long): PseudonymEntity { + override fun getMsisdnPseudonymEntityFor(msisdn: String, timestamp: Long): MsisdnPseudonymEntity { val (bounds, keyPrefix) = dateBounds.getBoundsNKeyPrefix(msisdn, timestamp) // Retrieves the element from cache. - return pseudonymCache.get(keyPrefix) { - getPseudonymEntity(keyPrefix) ?: createPseudonym(msisdn, bounds, keyPrefix) + return msisdnPseudonymCache.get(keyPrefix) { + getMsisdnPseudonymEntity(keyPrefix) ?: createMsisdnPseudonym(msisdn, bounds, keyPrefix) } } - fun findPseudonym(pseudonym: String): PseudonymEntity? { + fun findMsisdnPseudonym(pseudonym: String): MsisdnPseudonymEntity? 
{ val query = Query.newEntityQueryBuilder() - .setKind(PseudonymEntityKind) + .setKind(MsisdnPseudonymEntityKind) .setFilter(PropertyFilter.eq(pseudonymPropertyName, pseudonym)) .setLimit(1) .build() val results = datastore.run(query) if (results.hasNext()) { val entity = results.next() - return convertToPseudonymEntity(entity) + return convertToMsisdnPseudonymEntity(entity) } logger.info("Couldn't find, pseudonym = $pseudonym") return null } - fun deleteAllPseudonyms(msisdn: String): Int { + fun deleteAllMsisdnPseudonyms(msisdn: String): Int { val query = Query.newEntityQueryBuilder() - .setKind(PseudonymEntityKind) + .setKind(MsisdnPseudonymEntityKind) .setFilter(PropertyFilter.eq(msisdnPropertyName, msisdn)) .setLimit(1) .build() @@ -133,7 +133,7 @@ object PseudonymizerServiceSingleton : PseudonymizerService { return count } - fun exportPseudonyms(exportId: String) { + fun exportMsisdnPseudonyms(exportId: String) { bigQuery?.apply { logger.info("GET export all pseudonyms to the table $exportId") val exporter = PseudonymExport(exportId = exportId, bigquery = this, datastore = datastore) @@ -203,24 +203,24 @@ object PseudonymizerServiceSingleton : PseudonymizerService { return null } - private fun getPseudonymKey(keyPrefix: String): Key { - return datastore.newKeyFactory().setKind(PseudonymEntityKind).newKey(keyPrefix) + private fun getMsisdnPseudonymKey(keyPrefix: String): Key { + return datastore.newKeyFactory().setKind(MsisdnPseudonymEntityKind).newKey(keyPrefix) } - private fun getPseudonymEntity(keyPrefix: String): PseudonymEntity? { - val pseudonymKey = getPseudonymKey(keyPrefix) + private fun getMsisdnPseudonymEntity(keyPrefix: String): MsisdnPseudonymEntity? 
{ + val pseudonymKey = getMsisdnPseudonymKey(keyPrefix) val value = datastore.get(pseudonymKey) if (value != null) { // Create the object from datastore entity - return convertToPseudonymEntity(value) + return convertToMsisdnPseudonymEntity(value) } return null } - private fun createPseudonym(msisdn: String, bounds: Bounds, keyPrefix: String): PseudonymEntity { + private fun createMsisdnPseudonym(msisdn: String, bounds: Bounds, keyPrefix: String): MsisdnPseudonymEntity { val uuid = UUID.randomUUID().toString() - var entity = PseudonymEntity(msisdn, uuid, bounds.start, bounds.end) - val pseudonymKey = getPseudonymKey(keyPrefix) + var entity = MsisdnPseudonymEntity(msisdn, uuid, bounds.start, bounds.end) + val pseudonymKey = getMsisdnPseudonymKey(keyPrefix) val transaction = datastore.newTransaction() try { @@ -238,7 +238,7 @@ object PseudonymizerServiceSingleton : PseudonymizerService { transaction.commit() } else { // Use the existing one - entity = convertToPseudonymEntity(currentEntity) + entity = convertToMsisdnPseudonymEntity(currentEntity) } } finally { if (transaction.isActive) { @@ -248,8 +248,8 @@ object PseudonymizerServiceSingleton : PseudonymizerService { return entity } - private fun convertToPseudonymEntity(entity: Entity): PseudonymEntity { - return PseudonymEntity( + private fun convertToMsisdnPseudonymEntity(entity: Entity): MsisdnPseudonymEntity { + return MsisdnPseudonymEntity( entity.getString(msisdnPropertyName), entity.getString(pseudonymPropertyName), entity.getLong(startPropertyName), diff --git a/pseudonym-server/src/test/kotlin/org/ostelco/pseudonym/PseudonymResourceTest.kt b/pseudonym-server/src/test/kotlin/org/ostelco/pseudonym/PseudonymResourceTest.kt index 7c9d6ce98..6d1590945 100644 --- a/pseudonym-server/src/test/kotlin/org/ostelco/pseudonym/PseudonymResourceTest.kt +++ b/pseudonym-server/src/test/kotlin/org/ostelco/pseudonym/PseudonymResourceTest.kt @@ -7,8 +7,8 @@ import io.dropwizard.testing.junit.ResourceTestRule import 
org.junit.ClassRule import org.junit.Test import org.mockito.Mockito.mock -import org.ostelco.prime.model.ActivePseudonyms -import org.ostelco.prime.model.PseudonymEntity +import org.ostelco.prime.model.ActiveMsisdnPseudonyms +import org.ostelco.prime.model.MsisdnPseudonymEntity import org.ostelco.pseudonym.resources.PseudonymResource import org.ostelco.pseudonym.service.PseudonymizerServiceSingleton import javax.ws.rs.core.Response.Status @@ -81,7 +81,7 @@ class PseudonymResourceTest { @Test fun testGetPseudonym() { - lateinit var pseudonymEntity:PseudonymEntity + lateinit var msisdnPseudonymEntity:MsisdnPseudonymEntity run { val result = resources ?.target("$pathForCurrent/$testMsisdn1") @@ -91,21 +91,21 @@ class PseudonymResourceTest { if (result == null) return assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) - pseudonymEntity = mapper.readValue(json) - assertEquals(testMsisdn1, pseudonymEntity.msisdn) + msisdnPseudonymEntity = mapper.readValue(json) + assertEquals(testMsisdn1, msisdnPseudonymEntity.msisdn) } run { val result = resources - ?.target("$pathForGet/$testMsisdn1/${pseudonymEntity.start}") + ?.target("$pathForGet/$testMsisdn1/${msisdnPseudonymEntity.start}") ?.request() ?.get() assertNotNull(result) if (result == null) return assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) - val pseudonymEntity2 = mapper.readValue(json) - assertEquals(pseudonymEntity.pseudonym, pseudonymEntity2.pseudonym) + val pseudonymEntity2 = mapper.readValue(json) + assertEquals(msisdnPseudonymEntity.pseudonym, pseudonymEntity2.pseudonym) } } @@ -115,7 +115,7 @@ class PseudonymResourceTest { @Test fun testActivePseudonyms() { - lateinit var pseudonymEntity:PseudonymEntity + lateinit var msisdnPseudonymEntity:MsisdnPseudonymEntity run { val result = resources ?.target("$pathForCurrent/$testMsisdn1") @@ -125,8 +125,8 @@ class PseudonymResourceTest { if (result == null) return 
assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) - pseudonymEntity = mapper.readValue(json) - assertEquals(testMsisdn1, pseudonymEntity.msisdn) + msisdnPseudonymEntity = mapper.readValue(json) + assertEquals(testMsisdn1, msisdnPseudonymEntity.msisdn) } run { @@ -139,13 +139,13 @@ class PseudonymResourceTest { assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) // This is how the client will recieve the output. - val mapOfPseudonyms: Map = mapper.readValue(json) + val mapOfPseudonyms: Map = mapper.readValue(json) val current = mapOfPseudonyms["current"] val next = mapOfPseudonyms["next"] assertNotNull(current) assertNotNull(next) if (current != null && next != null) { - assertEquals(current.pseudonym, pseudonymEntity.pseudonym) + assertEquals(current.pseudonym, msisdnPseudonymEntity.pseudonym) assertEquals(current.end + 1, next.start) } } @@ -157,7 +157,7 @@ class PseudonymResourceTest { @Test fun testActivePseudonymUsingModel() { - lateinit var pseudonymEntity:PseudonymEntity + lateinit var msisdnPseudonymEntity:MsisdnPseudonymEntity run { val result = resources ?.target("$pathForCurrent/$testMsisdn1") @@ -167,8 +167,8 @@ class PseudonymResourceTest { if (result == null) return assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) - pseudonymEntity = mapper.readValue(json) - assertEquals(testMsisdn1, pseudonymEntity.msisdn) + msisdnPseudonymEntity = mapper.readValue(json) + assertEquals(testMsisdn1, msisdnPseudonymEntity.msisdn) } run { @@ -180,8 +180,8 @@ class PseudonymResourceTest { if (result == null) return assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) - val active = mapper.readValue(json) - assertEquals(active.current.pseudonym, pseudonymEntity.pseudonym) + val active = mapper.readValue(json) + assertEquals(active.current.pseudonym, msisdnPseudonymEntity.pseudonym) 
assertEquals(active.current.end + 1, active.next.start) } } @@ -192,7 +192,7 @@ class PseudonymResourceTest { @Test fun testFindPseudonym() { - lateinit var pseudonymEntity:PseudonymEntity + lateinit var msisdnPseudonymEntity:MsisdnPseudonymEntity run { val result = resources ?.target("$pathForCurrent/$testMsisdn1") @@ -202,20 +202,20 @@ class PseudonymResourceTest { if (result == null) return assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) - pseudonymEntity = mapper.readValue(json) - assertEquals(testMsisdn1, pseudonymEntity.msisdn) + msisdnPseudonymEntity = mapper.readValue(json) + assertEquals(testMsisdn1, msisdnPseudonymEntity.msisdn) } run { val result = resources - ?.target("$pathForFind/${pseudonymEntity.pseudonym}") + ?.target("$pathForFind/${msisdnPseudonymEntity.pseudonym}") ?.request() ?.get() assertNotNull(result) if (result == null) return assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) - val pseudonymEntity2 = mapper.readValue(json) + val pseudonymEntity2 = mapper.readValue(json) assertEquals(testMsisdn1, pseudonymEntity2.msisdn) } } @@ -225,7 +225,7 @@ class PseudonymResourceTest { */ @Test fun testDeletePseudonym() { - lateinit var pseudonymEntity:PseudonymEntity + lateinit var msisdnPseudonymEntity:MsisdnPseudonymEntity run { val result = resources ?.target("$pathForCurrent/$testMsisdn2") @@ -235,8 +235,8 @@ class PseudonymResourceTest { if (result == null) return assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) - pseudonymEntity = mapper.readValue(json) - assertEquals(testMsisdn2, pseudonymEntity.msisdn) + msisdnPseudonymEntity = mapper.readValue(json) + assertEquals(testMsisdn2, msisdnPseudonymEntity.msisdn) } run { @@ -255,7 +255,7 @@ class PseudonymResourceTest { run { val result = resources - ?.target("$pathForFind/${pseudonymEntity.pseudonym}") + 
?.target("$pathForFind/${msisdnPseudonymEntity.pseudonym}") ?.request() ?.get() assertNotNull(result) From 7ce4913cf33a9215617b287dac8e0995071f092e Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Thu, 6 Sep 2018 12:21:35 +0200 Subject: [PATCH 20/78] Change the name of 'msisdn' to sourceId. This will make the PseudonymEntity reusable. --- .../main/kotlin/org/ostelco/prime/model/Entities.kt | 2 +- .../service/PseudonymizerServiceSingleton.kt | 2 +- .../org/ostelco/pseudonym/PseudonymResourceTest.kt | 12 ++++++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt b/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt index b0245341d..1512ebd04 100644 --- a/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt +++ b/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt @@ -98,7 +98,7 @@ data class PurchaseRecordInfo(override val id: String, } data class MsisdnPseudonymEntity( - val msisdn: String, + val sourceId: String, val pseudonym: String, val start: Long, val end: Long) diff --git a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt index 215a21569..7c36055e1 100644 --- a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt +++ b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt @@ -229,7 +229,7 @@ object PseudonymizerServiceSingleton : PseudonymizerService { if (currentEntity == null) { // Prepare the new datastore entity val pseudonym = Entity.newBuilder(pseudonymKey) - .set(msisdnPropertyName, entity.msisdn) + .set(msisdnPropertyName, entity.sourceId) .set(pseudonymPropertyName, entity.pseudonym) .set(startPropertyName, entity.start) .set(endPropertyName, entity.end) diff --git 
a/pseudonym-server/src/test/kotlin/org/ostelco/pseudonym/PseudonymResourceTest.kt b/pseudonym-server/src/test/kotlin/org/ostelco/pseudonym/PseudonymResourceTest.kt index 6d1590945..71b7a750b 100644 --- a/pseudonym-server/src/test/kotlin/org/ostelco/pseudonym/PseudonymResourceTest.kt +++ b/pseudonym-server/src/test/kotlin/org/ostelco/pseudonym/PseudonymResourceTest.kt @@ -92,7 +92,7 @@ class PseudonymResourceTest { assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) msisdnPseudonymEntity = mapper.readValue(json) - assertEquals(testMsisdn1, msisdnPseudonymEntity.msisdn) + assertEquals(testMsisdn1, msisdnPseudonymEntity.sourceId) } run { @@ -126,7 +126,7 @@ class PseudonymResourceTest { assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) msisdnPseudonymEntity = mapper.readValue(json) - assertEquals(testMsisdn1, msisdnPseudonymEntity.msisdn) + assertEquals(testMsisdn1, msisdnPseudonymEntity.sourceId) } run { @@ -168,7 +168,7 @@ class PseudonymResourceTest { assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) msisdnPseudonymEntity = mapper.readValue(json) - assertEquals(testMsisdn1, msisdnPseudonymEntity.msisdn) + assertEquals(testMsisdn1, msisdnPseudonymEntity.sourceId) } run { @@ -203,7 +203,7 @@ class PseudonymResourceTest { assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) msisdnPseudonymEntity = mapper.readValue(json) - assertEquals(testMsisdn1, msisdnPseudonymEntity.msisdn) + assertEquals(testMsisdn1, msisdnPseudonymEntity.sourceId) } run { @@ -216,7 +216,7 @@ class PseudonymResourceTest { assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) val pseudonymEntity2 = mapper.readValue(json) - assertEquals(testMsisdn1, pseudonymEntity2.msisdn) + assertEquals(testMsisdn1, pseudonymEntity2.sourceId) } } @@ -236,7 +236,7 @@ class 
PseudonymResourceTest { assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) msisdnPseudonymEntity = mapper.readValue(json) - assertEquals(testMsisdn2, msisdnPseudonymEntity.msisdn) + assertEquals(testMsisdn2, msisdnPseudonymEntity.sourceId) } run { From 696010de1703a860514f97c36e5a9f6d63020e38 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Thu, 6 Sep 2018 12:27:26 +0200 Subject: [PATCH 21/78] Rename MsisdnPseudonymEntity to PseudonymEntity Making the name generic (agian). --- .../prime/client/api/store/SubscriberDAO.kt | 4 +- .../client/api/store/SubscriberDAOImpl.kt | 4 +- .../api/resources/SubscriptionResourceTest.kt | 8 +-- .../org/ostelco/prime/model/Entities.kt | 14 ++--- .../pseudonymizer/PseudonymizerService.kt | 8 +-- .../service/PseudonymizerServiceSingleton.kt | 26 ++++----- .../pseudonym/PseudonymResourceTest.kt | 54 +++++++++---------- 7 files changed, 56 insertions(+), 62 deletions(-) diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAO.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAO.kt index db74ff4a3..45bf42fb5 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAO.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAO.kt @@ -5,7 +5,7 @@ import org.ostelco.prime.client.api.model.Consent import org.ostelco.prime.client.api.model.Person import org.ostelco.prime.client.api.model.SubscriptionStatus import org.ostelco.prime.core.ApiError -import org.ostelco.prime.model.ActiveMsisdnPseudonyms +import org.ostelco.prime.model.ActivePseudonyms import org.ostelco.prime.model.ApplicationToken import org.ostelco.prime.model.Product import org.ostelco.prime.model.PurchaseRecord @@ -91,5 +91,5 @@ interface SubscriberDAO { @Deprecated(message = "use purchaseProduct") fun purchaseProductWithoutPayment(subscriberId: String, sku: String): Either - fun 
getActivePseudonymOfMsisdnForSubscriber(subscriberId: String): Either + fun getActivePseudonymOfMsisdnForSubscriber(subscriberId: String): Either } diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt index 30818f29a..2e5616ed3 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt @@ -17,7 +17,7 @@ import org.ostelco.prime.core.ForbiddenError import org.ostelco.prime.core.InsuffientStorageError import org.ostelco.prime.core.NotFoundError import org.ostelco.prime.logger -import org.ostelco.prime.model.ActiveMsisdnPseudonyms +import org.ostelco.prime.model.ActivePseudonyms import org.ostelco.prime.model.ApplicationToken import org.ostelco.prime.model.Product import org.ostelco.prime.model.PurchaseRecord @@ -144,7 +144,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu } } - override fun getActivePseudonymOfMsisdnForSubscriber(subscriberId: String): Either { + override fun getActivePseudonymOfMsisdnForSubscriber(subscriberId: String): Either { return storage.getMsisdn(subscriberId) .mapLeft { NotFoundError("Failed to msisdn for user. 
${it.message}") } .map { msisdn -> pseudonymizer.getActivePseudonymsForMsisdn(msisdn) } diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResourceTest.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResourceTest.kt index b3633e88d..ad6926288 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResourceTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResourceTest.kt @@ -23,10 +23,10 @@ import org.ostelco.prime.client.api.auth.OAuthAuthenticator import org.ostelco.prime.client.api.model.SubscriptionStatus import org.ostelco.prime.client.api.store.SubscriberDAO import org.ostelco.prime.client.api.util.AccessToken -import org.ostelco.prime.model.ActiveMsisdnPseudonyms +import org.ostelco.prime.model.ActivePseudonyms import org.ostelco.prime.model.Price import org.ostelco.prime.model.Product -import org.ostelco.prime.model.MsisdnPseudonymEntity +import org.ostelco.prime.model.PseudonymEntity import org.ostelco.prime.model.PurchaseRecord import java.time.Instant import java.util.* @@ -79,8 +79,8 @@ class SubscriptionResourceTest { val arg = argumentCaptor() val msisdn = "4790300001" - val pseudonym = MsisdnPseudonymEntity(msisdn, "random", 0, 1) - val activePseudonyms = ActiveMsisdnPseudonyms(pseudonym, pseudonym) + val pseudonym = PseudonymEntity(msisdn, "random", 0, 1) + val activePseudonyms = ActivePseudonyms(pseudonym, pseudonym) `when`(DAO.getActivePseudonymOfMsisdnForSubscriber(arg.capture())) .thenReturn(Either.right(activePseudonyms)) diff --git a/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt b/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt index 1512ebd04..d02412ace 100644 --- a/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt +++ b/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt @@ -97,18 +97,12 @@ data class PurchaseRecordInfo(override val id: String, status) } 
-data class MsisdnPseudonymEntity( +data class PseudonymEntity( val sourceId: String, val pseudonym: String, val start: Long, val end: Long) -data class SubscriberIdPseudonymEntity( - val subscriberId: String, - val pseudonym: String, - val start: Long, - val end: Long) - -data class ActiveMsisdnPseudonyms( - val current: MsisdnPseudonymEntity, - val next: MsisdnPseudonymEntity) \ No newline at end of file +data class ActivePseudonyms( + val current: PseudonymEntity, + val next: PseudonymEntity) diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/pseudonymizer/PseudonymizerService.kt b/prime-api/src/main/kotlin/org/ostelco/prime/pseudonymizer/PseudonymizerService.kt index dc27669aa..8e446944d 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/pseudonymizer/PseudonymizerService.kt +++ b/prime-api/src/main/kotlin/org/ostelco/prime/pseudonymizer/PseudonymizerService.kt @@ -1,11 +1,11 @@ package org.ostelco.prime.pseudonymizer -import org.ostelco.prime.model.ActiveMsisdnPseudonyms -import org.ostelco.prime.model.MsisdnPseudonymEntity +import org.ostelco.prime.model.ActivePseudonyms +import org.ostelco.prime.model.PseudonymEntity interface PseudonymizerService { - fun getActivePseudonymsForMsisdn(msisdn: String): ActiveMsisdnPseudonyms + fun getActivePseudonymsForMsisdn(msisdn: String): ActivePseudonyms - fun getMsisdnPseudonymEntityFor(msisdn: String, timestamp: Long): MsisdnPseudonymEntity + fun getMsisdnPseudonymEntityFor(msisdn: String, timestamp: Long): PseudonymEntity } \ No newline at end of file diff --git a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt index 7c36055e1..ee6763796 100644 --- a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt +++ b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt @@ -15,8 +15,8 @@ 
import com.google.common.cache.Cache import com.google.common.cache.CacheBuilder import io.dropwizard.setup.Environment import org.ostelco.prime.logger -import org.ostelco.prime.model.ActiveMsisdnPseudonyms -import org.ostelco.prime.model.MsisdnPseudonymEntity +import org.ostelco.prime.model.ActivePseudonyms +import org.ostelco.prime.model.PseudonymEntity import org.ostelco.prime.pseudonymizer.PseudonymizerService import org.ostelco.pseudonym.ConfigRegistry import org.ostelco.pseudonym.ExportTaskKind @@ -69,7 +69,7 @@ object PseudonymizerServiceSingleton : PseudonymizerService { private val executor = Executors.newFixedThreadPool(3) - val msisdnPseudonymCache: Cache = CacheBuilder.newBuilder() + val PSEUDONYM_CACHE: Cache = CacheBuilder.newBuilder() .maximumSize(5000) .build() @@ -85,24 +85,24 @@ object PseudonymizerServiceSingleton : PseudonymizerService { } } - override fun getActivePseudonymsForMsisdn(msisdn: String): ActiveMsisdnPseudonyms { + override fun getActivePseudonymsForMsisdn(msisdn: String): ActivePseudonyms { val currentTimestamp = Instant.now().toEpochMilli() val nextTimestamp = dateBounds.getNextPeriodStart(currentTimestamp) logger.info("GET pseudonym for Msisdn = $msisdn at timestamps = $currentTimestamp & $nextTimestamp") val current = getMsisdnPseudonymEntityFor(msisdn, currentTimestamp) val next = getMsisdnPseudonymEntityFor(msisdn, nextTimestamp) - return ActiveMsisdnPseudonyms(current, next) + return ActivePseudonyms(current, next) } - override fun getMsisdnPseudonymEntityFor(msisdn: String, timestamp: Long): MsisdnPseudonymEntity { + override fun getMsisdnPseudonymEntityFor(msisdn: String, timestamp: Long): PseudonymEntity { val (bounds, keyPrefix) = dateBounds.getBoundsNKeyPrefix(msisdn, timestamp) // Retrieves the element from cache. 
- return msisdnPseudonymCache.get(keyPrefix) { + return PSEUDONYM_CACHE.get(keyPrefix) { getMsisdnPseudonymEntity(keyPrefix) ?: createMsisdnPseudonym(msisdn, bounds, keyPrefix) } } - fun findMsisdnPseudonym(pseudonym: String): MsisdnPseudonymEntity? { + fun findMsisdnPseudonym(pseudonym: String): PseudonymEntity? { val query = Query.newEntityQueryBuilder() .setKind(MsisdnPseudonymEntityKind) .setFilter(PropertyFilter.eq(pseudonymPropertyName, pseudonym)) @@ -207,7 +207,7 @@ object PseudonymizerServiceSingleton : PseudonymizerService { return datastore.newKeyFactory().setKind(MsisdnPseudonymEntityKind).newKey(keyPrefix) } - private fun getMsisdnPseudonymEntity(keyPrefix: String): MsisdnPseudonymEntity? { + private fun getMsisdnPseudonymEntity(keyPrefix: String): PseudonymEntity? { val pseudonymKey = getMsisdnPseudonymKey(keyPrefix) val value = datastore.get(pseudonymKey) if (value != null) { @@ -217,9 +217,9 @@ object PseudonymizerServiceSingleton : PseudonymizerService { return null } - private fun createMsisdnPseudonym(msisdn: String, bounds: Bounds, keyPrefix: String): MsisdnPseudonymEntity { + private fun createMsisdnPseudonym(msisdn: String, bounds: Bounds, keyPrefix: String): PseudonymEntity { val uuid = UUID.randomUUID().toString() - var entity = MsisdnPseudonymEntity(msisdn, uuid, bounds.start, bounds.end) + var entity = PseudonymEntity(msisdn, uuid, bounds.start, bounds.end) val pseudonymKey = getMsisdnPseudonymKey(keyPrefix) val transaction = datastore.newTransaction() @@ -248,8 +248,8 @@ object PseudonymizerServiceSingleton : PseudonymizerService { return entity } - private fun convertToMsisdnPseudonymEntity(entity: Entity): MsisdnPseudonymEntity { - return MsisdnPseudonymEntity( + private fun convertToMsisdnPseudonymEntity(entity: Entity): PseudonymEntity { + return PseudonymEntity( entity.getString(msisdnPropertyName), entity.getString(pseudonymPropertyName), entity.getLong(startPropertyName), diff --git 
a/pseudonym-server/src/test/kotlin/org/ostelco/pseudonym/PseudonymResourceTest.kt b/pseudonym-server/src/test/kotlin/org/ostelco/pseudonym/PseudonymResourceTest.kt index 71b7a750b..288afdd53 100644 --- a/pseudonym-server/src/test/kotlin/org/ostelco/pseudonym/PseudonymResourceTest.kt +++ b/pseudonym-server/src/test/kotlin/org/ostelco/pseudonym/PseudonymResourceTest.kt @@ -7,8 +7,8 @@ import io.dropwizard.testing.junit.ResourceTestRule import org.junit.ClassRule import org.junit.Test import org.mockito.Mockito.mock -import org.ostelco.prime.model.ActiveMsisdnPseudonyms -import org.ostelco.prime.model.MsisdnPseudonymEntity +import org.ostelco.prime.model.ActivePseudonyms +import org.ostelco.prime.model.PseudonymEntity import org.ostelco.pseudonym.resources.PseudonymResource import org.ostelco.pseudonym.service.PseudonymizerServiceSingleton import javax.ws.rs.core.Response.Status @@ -81,7 +81,7 @@ class PseudonymResourceTest { @Test fun testGetPseudonym() { - lateinit var msisdnPseudonymEntity:MsisdnPseudonymEntity + lateinit var pseudonymEntity:PseudonymEntity run { val result = resources ?.target("$pathForCurrent/$testMsisdn1") @@ -91,21 +91,21 @@ class PseudonymResourceTest { if (result == null) return assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) - msisdnPseudonymEntity = mapper.readValue(json) - assertEquals(testMsisdn1, msisdnPseudonymEntity.sourceId) + pseudonymEntity = mapper.readValue(json) + assertEquals(testMsisdn1, pseudonymEntity.sourceId) } run { val result = resources - ?.target("$pathForGet/$testMsisdn1/${msisdnPseudonymEntity.start}") + ?.target("$pathForGet/$testMsisdn1/${pseudonymEntity.start}") ?.request() ?.get() assertNotNull(result) if (result == null) return assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) - val pseudonymEntity2 = mapper.readValue(json) - assertEquals(msisdnPseudonymEntity.pseudonym, pseudonymEntity2.pseudonym) + val 
pseudonymEntity2 = mapper.readValue(json) + assertEquals(pseudonymEntity.pseudonym, pseudonymEntity2.pseudonym) } } @@ -115,7 +115,7 @@ class PseudonymResourceTest { @Test fun testActivePseudonyms() { - lateinit var msisdnPseudonymEntity:MsisdnPseudonymEntity + lateinit var pseudonymEntity:PseudonymEntity run { val result = resources ?.target("$pathForCurrent/$testMsisdn1") @@ -125,8 +125,8 @@ class PseudonymResourceTest { if (result == null) return assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) - msisdnPseudonymEntity = mapper.readValue(json) - assertEquals(testMsisdn1, msisdnPseudonymEntity.sourceId) + pseudonymEntity = mapper.readValue(json) + assertEquals(testMsisdn1, pseudonymEntity.sourceId) } run { @@ -139,13 +139,13 @@ class PseudonymResourceTest { assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) // This is how the client will recieve the output. - val mapOfPseudonyms: Map = mapper.readValue(json) + val mapOfPseudonyms: Map = mapper.readValue(json) val current = mapOfPseudonyms["current"] val next = mapOfPseudonyms["next"] assertNotNull(current) assertNotNull(next) if (current != null && next != null) { - assertEquals(current.pseudonym, msisdnPseudonymEntity.pseudonym) + assertEquals(current.pseudonym, pseudonymEntity.pseudonym) assertEquals(current.end + 1, next.start) } } @@ -157,7 +157,7 @@ class PseudonymResourceTest { @Test fun testActivePseudonymUsingModel() { - lateinit var msisdnPseudonymEntity:MsisdnPseudonymEntity + lateinit var pseudonymEntity:PseudonymEntity run { val result = resources ?.target("$pathForCurrent/$testMsisdn1") @@ -167,8 +167,8 @@ class PseudonymResourceTest { if (result == null) return assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) - msisdnPseudonymEntity = mapper.readValue(json) - assertEquals(testMsisdn1, msisdnPseudonymEntity.sourceId) + pseudonymEntity = 
mapper.readValue(json) + assertEquals(testMsisdn1, pseudonymEntity.sourceId) } run { @@ -180,8 +180,8 @@ class PseudonymResourceTest { if (result == null) return assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) - val active = mapper.readValue(json) - assertEquals(active.current.pseudonym, msisdnPseudonymEntity.pseudonym) + val active = mapper.readValue(json) + assertEquals(active.current.pseudonym, pseudonymEntity.pseudonym) assertEquals(active.current.end + 1, active.next.start) } } @@ -192,7 +192,7 @@ class PseudonymResourceTest { @Test fun testFindPseudonym() { - lateinit var msisdnPseudonymEntity:MsisdnPseudonymEntity + lateinit var pseudonymEntity:PseudonymEntity run { val result = resources ?.target("$pathForCurrent/$testMsisdn1") @@ -202,20 +202,20 @@ class PseudonymResourceTest { if (result == null) return assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) - msisdnPseudonymEntity = mapper.readValue(json) - assertEquals(testMsisdn1, msisdnPseudonymEntity.sourceId) + pseudonymEntity = mapper.readValue(json) + assertEquals(testMsisdn1, pseudonymEntity.sourceId) } run { val result = resources - ?.target("$pathForFind/${msisdnPseudonymEntity.pseudonym}") + ?.target("$pathForFind/${pseudonymEntity.pseudonym}") ?.request() ?.get() assertNotNull(result) if (result == null) return assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) - val pseudonymEntity2 = mapper.readValue(json) + val pseudonymEntity2 = mapper.readValue(json) assertEquals(testMsisdn1, pseudonymEntity2.sourceId) } } @@ -225,7 +225,7 @@ class PseudonymResourceTest { */ @Test fun testDeletePseudonym() { - lateinit var msisdnPseudonymEntity:MsisdnPseudonymEntity + lateinit var pseudonymEntity:PseudonymEntity run { val result = resources ?.target("$pathForCurrent/$testMsisdn2") @@ -235,8 +235,8 @@ class PseudonymResourceTest { if (result == null) return 
assertEquals(Status.OK.statusCode, result.status) val json = result.readEntity(String::class.java) - msisdnPseudonymEntity = mapper.readValue(json) - assertEquals(testMsisdn2, msisdnPseudonymEntity.sourceId) + pseudonymEntity = mapper.readValue(json) + assertEquals(testMsisdn2, pseudonymEntity.sourceId) } run { @@ -255,7 +255,7 @@ class PseudonymResourceTest { run { val result = resources - ?.target("$pathForFind/${msisdnPseudonymEntity.pseudonym}") + ?.target("$pathForFind/${pseudonymEntity.pseudonym}") ?.request() ?.get() assertNotNull(result) From e256a768cb9b5ea46abe782f377851054d486f35 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Thu, 6 Sep 2018 12:33:53 +0200 Subject: [PATCH 22/78] Rename cache --- .../pseudonym/service/PseudonymizerServiceSingleton.kt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt index ee6763796..a41c74464 100644 --- a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt +++ b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt @@ -69,7 +69,7 @@ object PseudonymizerServiceSingleton : PseudonymizerService { private val executor = Executors.newFixedThreadPool(3) - val PSEUDONYM_CACHE: Cache = CacheBuilder.newBuilder() + val pseudonymCache: Cache = CacheBuilder.newBuilder() .maximumSize(5000) .build() @@ -97,7 +97,7 @@ object PseudonymizerServiceSingleton : PseudonymizerService { override fun getMsisdnPseudonymEntityFor(msisdn: String, timestamp: Long): PseudonymEntity { val (bounds, keyPrefix) = dateBounds.getBoundsNKeyPrefix(msisdn, timestamp) // Retrieves the element from cache. 
- return PSEUDONYM_CACHE.get(keyPrefix) { + return pseudonymCache.get(keyPrefix) { getMsisdnPseudonymEntity(keyPrefix) ?: createMsisdnPseudonym(msisdn, bounds, keyPrefix) } } @@ -255,4 +255,4 @@ object PseudonymizerServiceSingleton : PseudonymizerService { entity.getLong(startPropertyName), entity.getLong(endPropertyName)) } -} \ No newline at end of file +} From 7ddfc9a84caa1601da30c7f0ec06412f38f86ce9 Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Thu, 6 Sep 2018 12:39:59 +0200 Subject: [PATCH 23/78] Fix use of curly brace in logging --- .../client/api/store/SubscriberDAOImpl.kt | 20 +++++++------- .../org/ostelco/prime/ocs/EventHandlerImpl.kt | 2 +- .../StripePaymentProcessor.kt | 26 +++++++++---------- 3 files changed, 24 insertions(+), 24 deletions(-) diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt index 1aae9a635..3f72601a8 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt @@ -55,7 +55,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu BadRequestError("Incomplete profile description. 
${it.message}") } } catch (e: Exception) { - logger.error("Failed to fetch profile for subscriberId ${subscriberId}", e) + logger.error("Failed to fetch profile for subscriberId $subscriberId", e) Either.left(NotFoundError("Failed to fetch profile")) } } @@ -73,7 +73,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu getProfile(subscriberId) } } catch (e: Exception) { - logger.error("Failed to create profile for subscriberId ${subscriberId}", e) + logger.error("Failed to create profile for subscriberId $subscriberId", e) Either.left(ForbiddenError("Failed to create profile")) } } @@ -87,7 +87,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu try { storage.addNotificationToken(msisdn, applicationToken) } catch (e: Exception) { - logger.error("Failed to store ApplicationToken for msisdn ${msisdn}", e) + logger.error("Failed to store ApplicationToken for msisdn $msisdn", e) return Either.left(InsuffientStorageError("Failed to store ApplicationToken")) } return getNotificationToken(msisdn, applicationToken.applicationID) @@ -99,7 +99,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu ?.let { Either.right(it) } ?: return Either.left(NotFoundError("Failed to get ApplicationToken")) } catch (e: Exception) { - logger.error("Failed to get ApplicationToken for msisdn ${msisdn}", e) + logger.error("Failed to get ApplicationToken for msisdn $msisdn", e) return Either.left(NotFoundError("Failed to get ApplicationToken")) } } @@ -111,7 +111,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu try { storage.updateSubscriber(profile) } catch (e: Exception) { - logger.error("Failed to update profile for subscriberId ${subscriberId}", e) + logger.error("Failed to update profile for subscriberId $subscriberId", e) return Either.left(NotFoundError("Failed to update profile")) } @@ -128,7 +128,7 @@ class SubscriberDAOImpl(private val storage: 
ClientDataSource, private val ocsSu } .mapLeft { NotFoundError(it.message) } } catch (e: Exception) { - logger.error("Failed to get balance for subscriber ${subscriberId}", e) + logger.error("Failed to get balance for subscriber $subscriberId", e) return Either.left(NotFoundError("Failed to get balance")) } } @@ -156,7 +156,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu { NotFoundError("Failed to get purchase history. ${it.message}") }, { it.toList() }) } catch (e: Exception) { - logger.error("Failed to get purchase history for subscriberId ${subscriberId}", e) + logger.error("Failed to get purchase history for subscriberId $subscriberId", e) Either.left(NotFoundError("Failed to get purchase history")) } } @@ -178,7 +178,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu { NotFoundError(it.message) }, { products -> products.values }) } catch (e: Exception) { - logger.error("Failed to get Products for subscriberId ${subscriberId}", e) + logger.error("Failed to get Products for subscriberId $subscriberId", e) Either.left(NotFoundError("Failed to get Products")) } @@ -313,7 +313,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu { NotFoundError("Failed to get referral list. ${it.message}") }, { list -> list.map { Person(it) } }) } catch (e: Exception) { - logger.error("Failed to get referral list for subscriberId ${subscriberId}", e) + logger.error("Failed to get referral list for subscriberId $subscriberId", e) Either.left(NotFoundError("Failed to get referral list")) } } @@ -324,7 +324,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu { NotFoundError("Failed to get referred-by. 
${it.message}") }, { Person(name = it) }) } catch (e: Exception) { - logger.error("Failed to get referred-by for subscriberId ${subscriberId}", e) + logger.error("Failed to get referred-by for subscriberId $subscriberId", e) Either.left(NotFoundError("Failed to get referred-by")) } } diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt index 968ac8a4a..e17d30c0b 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt @@ -51,7 +51,7 @@ internal class EventHandlerImpl(private val ocsService: OcsService) : EventHandl private fun logEventProcessing(msg: String, event: OcsEvent) { val logString = """ - ${msg} + $msg Msisdn: ${event.msisdn} Requested bytes: ${event.requestedBucketBytes} Used bytes: ${event.usedBucketBytes} diff --git a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt index f8afb287b..79fa28746 100644 --- a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt +++ b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt @@ -19,7 +19,7 @@ class StripePaymentProcessor : PaymentProcessor { private val LOG by logger() override fun getSavedSources(customerId: String): Either> = - either (NotFoundError("Failed to get sources for customer ${customerId}")) { + either (NotFoundError("Failed to get sources for customer $customerId")) { val sources = mutableListOf() val customer = Customer.retrieve(customerId) customer.sources.data.forEach { @@ -29,14 +29,14 @@ class StripePaymentProcessor : PaymentProcessor { } override fun createPaymentProfile(userEmail: String): Either = - either(ForbiddenError("Failed to create profile for user ${userEmail}")) { + 
either(ForbiddenError("Failed to create profile for user $userEmail")) { val customerParams = HashMap() customerParams.put("email", userEmail) ProfileInfo(Customer.create(customerParams).id) } override fun createPlan(productId: String, amount: Int, currency: String, interval: PaymentProcessor.Interval): Either = - either(ForbiddenError("Failed to create plan with product id ${productId} amount ${amount} currency ${currency} interval ${interval.value}")) { + either(ForbiddenError("Failed to create plan with product id $productId amount $amount currency $currency interval ${interval.value}")) { val planParams = HashMap() planParams["amount"] = amount planParams["interval"] = interval.value @@ -46,13 +46,13 @@ class StripePaymentProcessor : PaymentProcessor { } override fun removePlan(planId: String): Either = - either(NotFoundError("Failed to delete plan ${planId}")) { + either(NotFoundError("Failed to delete plan $planId")) { val plan = Plan.retrieve(planId) PlanInfo(plan.delete().id) } override fun createProduct(sku: String): Either = - either(ForbiddenError("Failed to create product with sku ${sku}")) { + either(ForbiddenError("Failed to create product with sku $sku")) { val productParams = HashMap() productParams["name"] = sku productParams["type"] = "service" @@ -60,13 +60,13 @@ class StripePaymentProcessor : PaymentProcessor { } override fun removeProduct(productId: String): Either = - either(NotFoundError("Failed to delete product ${productId}")) { + either(NotFoundError("Failed to delete product $productId")) { val product = Product.retrieve(productId) ProductInfo(product.delete().id) } override fun addSource(customerId: String, sourceId: String): Either = - either(ForbiddenError("Failed to add source ${sourceId} to customer ${customerId}")) { + either(ForbiddenError("Failed to add source $sourceId to customer $customerId")) { val customer = Customer.retrieve(customerId) val params = HashMap() params["source"] = sourceId @@ -74,7 +74,7 @@ class 
StripePaymentProcessor : PaymentProcessor { } override fun setDefaultSource(customerId: String, sourceId: String): Either = - either(ForbiddenError("Failed to set default source ${sourceId} for customer ${customerId}")) { + either(ForbiddenError("Failed to set default source $sourceId for customer $customerId")) { val customer = Customer.retrieve(customerId) val updateParams = HashMap() updateParams.put("default_source", sourceId) @@ -83,18 +83,18 @@ class StripePaymentProcessor : PaymentProcessor { } override fun getDefaultSource(customerId: String): Either = - either(NotFoundError( "Failed to get default source for customer ${customerId}")) { + either(NotFoundError( "Failed to get default source for customer $customerId")) { SourceInfo(Customer.retrieve(customerId).defaultSource) } override fun deletePaymentProfile(customerId: String): Either = - either(NotFoundError("Failed to delete customer ${customerId}")) { + either(NotFoundError("Failed to delete customer $customerId")) { val customer = Customer.retrieve(customerId) ProfileInfo(customer.delete().id) } override fun subscribeToPlan(planId: String, customerId: String): Either = - either(ForbiddenError("Failed to subscribe customer ${customerId} to plan ${planId}")) { + either(ForbiddenError("Failed to subscribe customer $customerId to plan $planId")) { val item = HashMap() item["plan"] = planId @@ -109,7 +109,7 @@ class StripePaymentProcessor : PaymentProcessor { } override fun cancelSubscription(subscriptionId: String, atIntervalEnd: Boolean): Either = - either(ForbiddenError("Failed to unsubscribe subscription Id : ${subscriptionId} atIntervalEnd ${atIntervalEnd}")) { + either(ForbiddenError("Failed to unsubscribe subscription Id : $subscriptionId atIntervalEnd $atIntervalEnd")) { val subscription = Subscription.retrieve(subscriptionId) val subscriptionParams = HashMap() subscriptionParams["at_period_end"] = atIntervalEnd @@ -162,7 +162,7 @@ class StripePaymentProcessor : PaymentProcessor { } override fun 
removeSource(customerId: String, sourceId: String): Either = - either(ForbiddenError("Failed to remove source ${sourceId} from customer ${customerId}")) { + either(ForbiddenError("Failed to remove source $sourceId from customer $customerId")) { Customer.retrieve(customerId).sources.retrieve(sourceId).delete().id } From 0195cf8da5cd15b38e4e57fde6b886de15c32e22 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Thu, 6 Sep 2018 13:21:38 +0200 Subject: [PATCH 24/78] Split Datastore access functions to a seperate class --- prime/infra/dev/prime-client-api.yaml | 4 +- prime/infra/prod/prime-client-api.yaml | 4 +- .../service/PseudonymizerServiceSingleton.kt | 100 +++++++++++------- 3 files changed, 66 insertions(+), 42 deletions(-) diff --git a/prime/infra/dev/prime-client-api.yaml b/prime/infra/dev/prime-client-api.yaml index b6e682c36..419278233 100644 --- a/prime/infra/dev/prime-client-api.yaml +++ b/prime/infra/dev/prime-client-api.yaml @@ -482,7 +482,7 @@ definitions: PseudonymEntity: type: object properties: - msisdn: + sourceId: type: string pseudonym: type: string @@ -495,7 +495,7 @@ definitions: type: integer format: int64 required: - - msisdn + - sourceId - pseudonym - start - end diff --git a/prime/infra/prod/prime-client-api.yaml b/prime/infra/prod/prime-client-api.yaml index 2d0234ba1..8993f7d64 100644 --- a/prime/infra/prod/prime-client-api.yaml +++ b/prime/infra/prod/prime-client-api.yaml @@ -482,7 +482,7 @@ definitions: PseudonymEntity: type: object properties: - msisdn: + sourceId: type: string pseudonym: type: string @@ -495,7 +495,7 @@ definitions: type: integer format: int64 required: - - msisdn + - sourceId - pseudonym - start - end diff --git a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt index a41c74464..4dc97f11a 100644 --- 
a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt +++ b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt @@ -67,6 +67,7 @@ object PseudonymizerServiceSingleton : PseudonymizerService { private var bigQuery: BigQuery? = null private val dateBounds: DateBounds = WeeklyBounds() + private val msisdnPseudonymiser: Pseudonymizer = Pseudonymizer(MsisdnPseudonymEntityKind, msisdnPropertyName) private val executor = Executors.newFixedThreadPool(3) val pseudonymCache: Cache = CacheBuilder.newBuilder() @@ -83,6 +84,7 @@ object PseudonymizerServiceSingleton : PseudonymizerService { logger.info("Local testing, BigQuery is not available...") null } + msisdnPseudonymiser.init(datastore, bigQuery, dateBounds) } override fun getActivePseudonymsForMsisdn(msisdn: String): ActivePseudonyms { @@ -98,39 +100,16 @@ object PseudonymizerServiceSingleton : PseudonymizerService { val (bounds, keyPrefix) = dateBounds.getBoundsNKeyPrefix(msisdn, timestamp) // Retrieves the element from cache. return pseudonymCache.get(keyPrefix) { - getMsisdnPseudonymEntity(keyPrefix) ?: createMsisdnPseudonym(msisdn, bounds, keyPrefix) + msisdnPseudonymiser.getPseudonymEntity(keyPrefix) ?: msisdnPseudonymiser.createPseudonym(msisdn, bounds, keyPrefix) } } fun findMsisdnPseudonym(pseudonym: String): PseudonymEntity? 
{ - val query = Query.newEntityQueryBuilder() - .setKind(MsisdnPseudonymEntityKind) - .setFilter(PropertyFilter.eq(pseudonymPropertyName, pseudonym)) - .setLimit(1) - .build() - val results = datastore.run(query) - if (results.hasNext()) { - val entity = results.next() - return convertToMsisdnPseudonymEntity(entity) - } - logger.info("Couldn't find, pseudonym = $pseudonym") - return null + return msisdnPseudonymiser.findPseudonym(pseudonym) } fun deleteAllMsisdnPseudonyms(msisdn: String): Int { - val query = Query.newEntityQueryBuilder() - .setKind(MsisdnPseudonymEntityKind) - .setFilter(PropertyFilter.eq(msisdnPropertyName, msisdn)) - .setLimit(1) - .build() - val results = datastore.run(query) - var count = 0 - while (results.hasNext()) { - val entity = results.next() - datastore.delete(entity.key) - count++ - } - return count + return msisdnPseudonymiser.deleteAllPseudonyms(msisdn) } fun exportMsisdnPseudonyms(exportId: String) { @@ -202,25 +181,70 @@ object PseudonymizerServiceSingleton : PseudonymizerService { } return null } +} + + +class Pseudonymizer(val entityKind: String, val sourcePropertyName: String) { + private val logger by logger() + private lateinit var datastore: Datastore + private var bigQuery: BigQuery? = null + private lateinit var dateBounds: DateBounds + + fun init(ds: Datastore, bq: BigQuery? = null, bounds: DateBounds) { + datastore = ds + bigQuery = bq + dateBounds = bounds + } + + fun findPseudonym(pseudonym: String): PseudonymEntity? 
{ + val query = Query.newEntityQueryBuilder() + .setKind(entityKind) + .setFilter(PropertyFilter.eq(pseudonymPropertyName, pseudonym)) + .setLimit(1) + .build() + val results = datastore.run(query) + if (results.hasNext()) { + val entity = results.next() + return convertToPseudonymEntity(entity) + } + logger.info("Couldn't find, pseudonym = $pseudonym") + return null + } + + fun deleteAllPseudonyms(sourceId: String): Int { + val query = Query.newEntityQueryBuilder() + .setKind(entityKind) + .setFilter(PropertyFilter.eq(sourcePropertyName, sourceId)) + .setLimit(1) + .build() + val results = datastore.run(query) + var count = 0 + while (results.hasNext()) { + val entity = results.next() + datastore.delete(entity.key) + count++ + } + return count + } - private fun getMsisdnPseudonymKey(keyPrefix: String): Key { - return datastore.newKeyFactory().setKind(MsisdnPseudonymEntityKind).newKey(keyPrefix) + private fun getPseudonymKey(keyPrefix: String): Key { + return datastore.newKeyFactory().setKind(entityKind).newKey(keyPrefix) } - private fun getMsisdnPseudonymEntity(keyPrefix: String): PseudonymEntity? { - val pseudonymKey = getMsisdnPseudonymKey(keyPrefix) + fun getPseudonymEntity(keyPrefix: String): PseudonymEntity? 
{ + val pseudonymKey = getPseudonymKey(keyPrefix) val value = datastore.get(pseudonymKey) if (value != null) { // Create the object from datastore entity - return convertToMsisdnPseudonymEntity(value) + return convertToPseudonymEntity(value) } return null } - private fun createMsisdnPseudonym(msisdn: String, bounds: Bounds, keyPrefix: String): PseudonymEntity { + fun createPseudonym(sourceId: String, bounds: Bounds, keyPrefix: String): PseudonymEntity { val uuid = UUID.randomUUID().toString() - var entity = PseudonymEntity(msisdn, uuid, bounds.start, bounds.end) - val pseudonymKey = getMsisdnPseudonymKey(keyPrefix) + var entity = PseudonymEntity(sourceId, uuid, bounds.start, bounds.end) + val pseudonymKey = getPseudonymKey(keyPrefix) val transaction = datastore.newTransaction() try { @@ -229,7 +253,7 @@ object PseudonymizerServiceSingleton : PseudonymizerService { if (currentEntity == null) { // Prepare the new datastore entity val pseudonym = Entity.newBuilder(pseudonymKey) - .set(msisdnPropertyName, entity.sourceId) + .set(sourcePropertyName, entity.sourceId) .set(pseudonymPropertyName, entity.pseudonym) .set(startPropertyName, entity.start) .set(endPropertyName, entity.end) @@ -238,7 +262,7 @@ object PseudonymizerServiceSingleton : PseudonymizerService { transaction.commit() } else { // Use the existing one - entity = convertToMsisdnPseudonymEntity(currentEntity) + entity = convertToPseudonymEntity(currentEntity) } } finally { if (transaction.isActive) { @@ -248,9 +272,9 @@ object PseudonymizerServiceSingleton : PseudonymizerService { return entity } - private fun convertToMsisdnPseudonymEntity(entity: Entity): PseudonymEntity { + private fun convertToPseudonymEntity(entity: Entity): PseudonymEntity { return PseudonymEntity( - entity.getString(msisdnPropertyName), + entity.getString(sourcePropertyName), entity.getString(pseudonymPropertyName), entity.getLong(startPropertyName), entity.getLong(endPropertyName)) From 7ec4797101763c8af3362651ec73426932cdbc2a Mon 
Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Thu, 6 Sep 2018 13:39:13 +0200 Subject: [PATCH 25/78] Add Subscriber Id pseudonyms --- .../DataConsumptionInfoPublisher.kt | 2 +- .../publishers/PurchaseInfoPublisher.kt | 2 +- .../pseudonymizer/PseudonymizerService.kt | 5 ++- .../pseudonym/resources/PseudonymResource.kt | 4 +- .../service/PseudonymizerServiceSingleton.kt | 38 +++++++++++-------- 5 files changed, 30 insertions(+), 21 deletions(-) diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt index 2709813a7..df0258aa8 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt @@ -49,7 +49,7 @@ object DataConsumptionInfoPublisher : Managed { } val now = Instant.now().toEpochMilli() - val pseudonym = pseudonymizerService.getMsisdnPseudonymEntityFor(msisdn, now).pseudonym + val pseudonym = pseudonymizerService.getMsisdnPseudonym(msisdn, now).pseudonym val data = DataTrafficInfo.newBuilder() .setMsisdn(pseudonym) diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt index b4fec14d2..376494564 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt @@ -72,7 +72,7 @@ object PurchaseInfoPublisher : Managed { fun publish(purchaseRecord: PurchaseRecord, subscriberId: String, status: String) { val encodedSubscriberId = URLEncoder.encode(subscriberId,"UTF-8") - val pseudonym = 
pseudonymizerService.getMsisdnPseudonymEntityFor(encodedSubscriberId, purchaseRecord.timestamp).pseudonym + val pseudonym = pseudonymizerService.getSubscriberIdPseudonym(encodedSubscriberId, purchaseRecord.timestamp).pseudonym val pubsubMessage = PubsubMessage.newBuilder() .setData(convertToJson(PurchaseRecordInfo(purchaseRecord, pseudonym, status))) diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/pseudonymizer/PseudonymizerService.kt b/prime-api/src/main/kotlin/org/ostelco/prime/pseudonymizer/PseudonymizerService.kt index 8e446944d..f89f0f327 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/pseudonymizer/PseudonymizerService.kt +++ b/prime-api/src/main/kotlin/org/ostelco/prime/pseudonymizer/PseudonymizerService.kt @@ -7,5 +7,8 @@ interface PseudonymizerService { fun getActivePseudonymsForMsisdn(msisdn: String): ActivePseudonyms - fun getMsisdnPseudonymEntityFor(msisdn: String, timestamp: Long): PseudonymEntity + fun getMsisdnPseudonym(msisdn: String, timestamp: Long): PseudonymEntity + + fun getSubscriberIdPseudonym(subscriberId: String, timestamp: Long): PseudonymEntity + } \ No newline at end of file diff --git a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/resources/PseudonymResource.kt b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/resources/PseudonymResource.kt index d44857f32..85a6f37c9 100644 --- a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/resources/PseudonymResource.kt +++ b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/resources/PseudonymResource.kt @@ -37,7 +37,7 @@ class PseudonymResource { fun getPseudonym(@NotBlank @PathParam("msisdn") msisdn: String, @NotBlank @PathParam("timestamp") timestamp: String): Response { logger.info("GET pseudonym for Msisdn = $msisdn at timestamp = $timestamp") - val entity = PseudonymizerServiceSingleton.getMsisdnPseudonymEntityFor(msisdn, timestamp.toLong()) + val entity = PseudonymizerServiceSingleton.getMsisdnPseudonym(msisdn, timestamp.toLong()) return 
Response.ok(entity, MediaType.APPLICATION_JSON).build() } @@ -51,7 +51,7 @@ class PseudonymResource { fun getPseudonym(@NotBlank @PathParam("msisdn") msisdn: String): Response { val timestamp = Instant.now().toEpochMilli() logger.info("GET pseudonym for Msisdn = $msisdn at current time, timestamp = $timestamp") - val entity = PseudonymizerServiceSingleton.getMsisdnPseudonymEntityFor(msisdn, timestamp) + val entity = PseudonymizerServiceSingleton.getMsisdnPseudonym(msisdn, timestamp) return Response.ok(entity, MediaType.APPLICATION_JSON).build() } diff --git a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt index 4dc97f11a..e82932c4c 100644 --- a/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt +++ b/pseudonym-server/src/main/kotlin/org/ostelco/pseudonym/service/PseudonymizerServiceSingleton.kt @@ -18,17 +18,8 @@ import org.ostelco.prime.logger import org.ostelco.prime.model.ActivePseudonyms import org.ostelco.prime.model.PseudonymEntity import org.ostelco.prime.pseudonymizer.PseudonymizerService -import org.ostelco.pseudonym.ConfigRegistry -import org.ostelco.pseudonym.ExportTaskKind -import org.ostelco.pseudonym.MsisdnPseudonymEntityKind -import org.ostelco.pseudonym.endPropertyName -import org.ostelco.pseudonym.errorPropertyName -import org.ostelco.pseudonym.exportIdPropertyName -import org.ostelco.pseudonym.msisdnPropertyName -import org.ostelco.pseudonym.pseudonymPropertyName +import org.ostelco.pseudonym.* import org.ostelco.pseudonym.resources.ExportTask -import org.ostelco.pseudonym.startPropertyName -import org.ostelco.pseudonym.statusPropertyName import org.ostelco.pseudonym.utils.WeeklyBounds import java.time.Instant import java.util.* @@ -68,9 +59,13 @@ object PseudonymizerServiceSingleton : PseudonymizerService { private val dateBounds: DateBounds = WeeklyBounds() 
private val msisdnPseudonymiser: Pseudonymizer = Pseudonymizer(MsisdnPseudonymEntityKind, msisdnPropertyName) + private val subscriberIdPseudonymiser: Pseudonymizer = Pseudonymizer(SubscriberIdPseudonymEntityKind, subscriberIdPropertyName) private val executor = Executors.newFixedThreadPool(3) - val pseudonymCache: Cache = CacheBuilder.newBuilder() + val msisdnPseudonymCache: Cache = CacheBuilder.newBuilder() + .maximumSize(5000) + .build() + val subscriberIdPseudonymCache: Cache = CacheBuilder.newBuilder() .maximumSize(5000) .build() @@ -85,22 +80,33 @@ object PseudonymizerServiceSingleton : PseudonymizerService { null } msisdnPseudonymiser.init(datastore, bigQuery, dateBounds) + subscriberIdPseudonymiser.init(datastore, bigQuery, dateBounds) } override fun getActivePseudonymsForMsisdn(msisdn: String): ActivePseudonyms { val currentTimestamp = Instant.now().toEpochMilli() val nextTimestamp = dateBounds.getNextPeriodStart(currentTimestamp) logger.info("GET pseudonym for Msisdn = $msisdn at timestamps = $currentTimestamp & $nextTimestamp") - val current = getMsisdnPseudonymEntityFor(msisdn, currentTimestamp) - val next = getMsisdnPseudonymEntityFor(msisdn, nextTimestamp) + val current = getMsisdnPseudonym(msisdn, currentTimestamp) + val next = getMsisdnPseudonym(msisdn, nextTimestamp) return ActivePseudonyms(current, next) } - override fun getMsisdnPseudonymEntityFor(msisdn: String, timestamp: Long): PseudonymEntity { + override fun getMsisdnPseudonym(msisdn: String, timestamp: Long): PseudonymEntity { val (bounds, keyPrefix) = dateBounds.getBoundsNKeyPrefix(msisdn, timestamp) // Retrieves the element from cache. 
- return pseudonymCache.get(keyPrefix) { - msisdnPseudonymiser.getPseudonymEntity(keyPrefix) ?: msisdnPseudonymiser.createPseudonym(msisdn, bounds, keyPrefix) + return msisdnPseudonymCache.get(keyPrefix) { + msisdnPseudonymiser.getPseudonymEntity(keyPrefix) + ?: msisdnPseudonymiser.createPseudonym(msisdn, bounds, keyPrefix) + } + } + + override fun getSubscriberIdPseudonym(subscriberId: String, timestamp: Long): PseudonymEntity { + val (bounds, keyPrefix) = dateBounds.getBoundsNKeyPrefix(subscriberId, timestamp) + // Retrieves the element from cache. + return subscriberIdPseudonymCache.get(keyPrefix) { + subscriberIdPseudonymiser.getPseudonymEntity(keyPrefix) + ?: subscriberIdPseudonymiser.createPseudonym(subscriberId, bounds, keyPrefix) } } From 6f6477ea498576bf492d4e81c39622abcdccde9b Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Thu, 6 Sep 2018 13:59:03 +0200 Subject: [PATCH 26/78] Remove unused code --- .../main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt | 2 -- 1 file changed, 2 deletions(-) diff --git a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt index ac89bc583..b2f109993 100644 --- a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt +++ b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt @@ -3,7 +3,6 @@ package org.ostelco.prime.storage.graph import arrow.core.Either import arrow.core.flatMap import org.neo4j.driver.v1.Transaction -import org.ostelco.prime.analytics.AnalyticsService import org.ostelco.prime.logger import org.ostelco.prime.model.Bundle import org.ostelco.prime.model.Offer @@ -50,7 +49,6 @@ object Neo4jStoreSingleton : GraphStore { private val ocs: OcsAdminService by lazy { getResource() } private val logger by logger() - private val analyticsReporter by lazy { getResource() } // // Entity From 37f8db4f1e68680412872467105a0905e0d8a53e Mon Sep 17 00:00:00 2001 From: "Kjell 
M. Myksvoll" Date: Thu, 6 Sep 2018 14:20:03 +0200 Subject: [PATCH 27/78] Adds acceptance tests for source payment handling --- acceptance-tests/script/wait.sh | 1 + .../org/ostelco/at/common/StripePayment.kt | 9 +++++++ .../kotlin/org/ostelco/at/okhttp/Tests.kt | 27 +++++++++++++++++++ 3 files changed, 37 insertions(+) diff --git a/acceptance-tests/script/wait.sh b/acceptance-tests/script/wait.sh index 6a86cecfb..64400794a 100755 --- a/acceptance-tests/script/wait.sh +++ b/acceptance-tests/script/wait.sh @@ -22,6 +22,7 @@ java -cp '/acceptance-tests.jar' org.junit.runner.JUnitCore \ org.ostelco.at.okhttp.GetPseudonymsTest \ org.ostelco.at.okhttp.GetProductsTest \ org.ostelco.at.okhttp.GetSubscriptionStatusTest \ + org.ostelco.at.okhttp.SourceTest \ org.ostelco.at.okhttp.PurchaseTest \ org.ostelco.at.okhttp.ConsentTest \ org.ostelco.at.okhttp.ProfileTest \ diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt index 4805f4773..57ca065dc 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt @@ -34,6 +34,15 @@ object StripePayment { return source.id } + fun getCardIdForTokenId(tokenId: String) : String { + + // https://stripe.com/docs/api/java#create_source + Stripe.apiKey = System.getenv("STRIPE_API_KEY") + + val token = Token.retrieve(tokenId) + return token.card.id + } + fun deleteAllCustomers() { // https://stripe.com/docs/api/java#create_card_token Stripe.apiKey = System.getenv("STRIPE_API_KEY") diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt index 1232427be..707e87106 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt @@ -163,6 +163,33 @@ class GetProductsTest { } } 
+class SourceTest { + + @Test + fun `okhttp test - POST source create`() { + + StripePayment.deleteAllCustomers() + + val email = "purchase-${randomInt()}@test.com" + createProfile(name = "Test Payment Source", email = email) + + val client = clientForSubject(subject = email) + + val sourceId = StripePayment.createPaymentTokenId() + + // Ties source with user profile both local and with Stripe + client.createSource(sourceId) + + Thread.sleep(200) + + val sources = client.listSources() + assert(sources.size > 0) { "Expected at least one payment source for profile $email" } + + val cardId = StripePayment.getCardIdForTokenId(sourceId) + assertNotNull(sources.first { it.id == cardId }, "Expected card $cardId in list of payment sources for profile $email") + } +} + class PurchaseTest { @Test From de1d3b92d2fa5eb9e47888ce79bbcadf37236d99 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Thu, 6 Sep 2018 14:20:09 +0200 Subject: [PATCH 28/78] Remove unused code. --- .../main/kotlin/org/ostelco/prime/analytics/AnalyticsService.kt | 1 - 1 file changed, 1 deletion(-) diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsService.kt b/prime-api/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsService.kt index c7088ea54..ddfcac34a 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsService.kt +++ b/prime-api/src/main/kotlin/org/ostelco/prime/analytics/AnalyticsService.kt @@ -3,7 +3,6 @@ package org.ostelco.prime.analytics import org.ostelco.prime.analytics.MetricType.COUNTER import org.ostelco.prime.analytics.MetricType.GAUGE import org.ostelco.prime.model.PurchaseRecord -import org.ostelco.prime.model.Subscriber interface AnalyticsService { fun reportTrafficInfo(msisdn: String, usedBytes: Long, bundleBytes: Long) From baeef3a5eb7fedc6017347a7124f5b21ff3aa6cb Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Thu, 6 Sep 2018 15:14:18 +0200 Subject: [PATCH 29/78] Minor fixes and updates. 
--- README.md | 1 - acceptance-tests/Dockerfile | 1 + acceptance-tests/build.gradle | 2 + acceptance-tests/config/.gitignore | 1 + .../kotlin/org/ostelco/at/common/Firebase.kt | 41 ++++ .../kotlin/org/ostelco/at/jersey/Tests.kt | 2 + .../kotlin/org/ostelco/at/okhttp/Tests.kt | 2 + .../prime/appnotifier/FirebaseModule.kt | 2 - .../prime/client/api/ClientApiModule.kt | 14 ++ docker-compose.override.yaml | 1 + docs/NEO4J.md | 2 +- docs/TEST.md | 8 +- docs/test-deployment/deployment.png | Bin 17272 -> 15671 bytes docs/test-deployment/deployment.puml | 13 +- .../prime/storage/embeddedgraph/GraphStore.kt | 183 ------------------ prime/build.gradle | 4 +- prime/config/config.yaml | 10 +- prime/infra/dev/prime-client-api.yaml | 5 +- prime/script/deploy-dev-direct.sh | 2 +- prime/script/deploy-direct.sh | 2 + 20 files changed, 89 insertions(+), 207 deletions(-) create mode 100644 acceptance-tests/config/.gitignore create mode 100644 acceptance-tests/src/main/kotlin/org/ostelco/at/common/Firebase.kt delete mode 100644 embedded-graph-store/src/main/kotlin/org/ostelco/prime/storage/embeddedgraph/GraphStore.kt diff --git a/README.md b/README.md index 804ce931b..ae502309c 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,6 @@ Mono Repository for core protocols and services around a OCS/BSS for packet data * [diameter-stack](./diameter-stack/README.md) * [diameter-test](./diameter-test/README.md) * [exporter](./exporter/README.md) - * [ext-pgw](./ext-pgw/README.md) * [ocs-api](./ocs-api/README.md) * [ocsgw](./ocsgw/README.md) * [ostelco-lib](./ostelco-lib/README.md) diff --git a/acceptance-tests/Dockerfile b/acceptance-tests/Dockerfile index 2217a0487..f897cb74a 100644 --- a/acceptance-tests/Dockerfile +++ b/acceptance-tests/Dockerfile @@ -6,6 +6,7 @@ RUN apt-get update \ && apt-get install -y --no-install-recommends netcat \ && rm -rf /var/lib/apt/lists/* +COPY config/ /secret/ COPY src/main/resources/ / COPY script/wait.sh /wait.sh COPY build/libs/acceptance-tests-uber.jar 
/acceptance-tests.jar diff --git a/acceptance-tests/build.gradle b/acceptance-tests/build.gradle index 3b6a16248..a8074a553 100644 --- a/acceptance-tests/build.gradle +++ b/acceptance-tests/build.gradle @@ -14,6 +14,8 @@ dependencies { implementation project(":prime-client-api") implementation project(':diameter-test') + implementation 'com.google.firebase:firebase-admin:6.4.0' + implementation "com.stripe:stripe-java:6.8.0" implementation 'io.jsonwebtoken:jjwt:0.9.1' // tests fail when updated to 2.27 diff --git a/acceptance-tests/config/.gitignore b/acceptance-tests/config/.gitignore new file mode 100644 index 000000000..bf045303f --- /dev/null +++ b/acceptance-tests/config/.gitignore @@ -0,0 +1 @@ +pantel-prod.json \ No newline at end of file diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/common/Firebase.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/common/Firebase.kt new file mode 100644 index 000000000..35052c378 --- /dev/null +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/common/Firebase.kt @@ -0,0 +1,41 @@ +package org.ostelco.at.common + +import com.google.auth.oauth2.GoogleCredentials +import com.google.firebase.FirebaseApp +import com.google.firebase.FirebaseOptions +import com.google.firebase.database.FirebaseDatabase +import java.io.FileInputStream +import java.nio.file.Files +import java.nio.file.Paths + +object Firebase { + + private fun setupFirebaseInstance(): FirebaseDatabase { + + try { + FirebaseApp.getInstance() + } catch (e: Exception) { + val databaseName = "pantel-2decb" + val configFile = System.getenv("GOOGLE_APPLICATION_CREDENTIALS") ?: "config/pantel-prod.json" + + val credentials: GoogleCredentials = if (Files.exists(Paths.get(configFile))) { + FileInputStream(configFile).use { serviceAccount -> GoogleCredentials.fromStream(serviceAccount) } + } else { + throw Exception() + } + + val options = FirebaseOptions.Builder() + .setCredentials(credentials) + .setDatabaseUrl("https://$databaseName.firebaseio.com/") + 
.build() + + FirebaseApp.initializeApp(options) + } + + return FirebaseDatabase.getInstance() + } + + fun deleteAllPaymentCustomers() { + setupFirebaseInstance().getReference("test/paymentId").removeValueAsync().get() + } +} \ No newline at end of file diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt index 91186ed16..2a3006db1 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt @@ -1,6 +1,7 @@ package org.ostelco.at.jersey import org.junit.Test +import org.ostelco.at.common.Firebase import org.ostelco.at.common.StripePayment import org.ostelco.at.common.createProfile import org.ostelco.at.common.createSubscription @@ -229,6 +230,7 @@ class PurchaseTest { fun `jersey test - POST products purchase`() { StripePayment.deleteAllCustomers() + Firebase.deleteAllPaymentCustomers() val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Purchase User", email = email) diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt index 673645bad..aa9873cd3 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt @@ -1,6 +1,7 @@ package org.ostelco.at.okhttp import org.junit.Test +import org.ostelco.at.common.Firebase import org.ostelco.at.common.StripePayment import org.ostelco.at.common.createProfile import org.ostelco.at.common.createSubscription @@ -169,6 +170,7 @@ class PurchaseTest { fun `okhttp test - POST products purchase`() { StripePayment.deleteAllCustomers() + Firebase.deleteAllPaymentCustomers() val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Purchase User", email = email) diff --git a/app-notifier/src/main/kotlin/org/ostelco/prime/appnotifier/FirebaseModule.kt 
b/app-notifier/src/main/kotlin/org/ostelco/prime/appnotifier/FirebaseModule.kt index da4a2d12c..4ddb234c5 100644 --- a/app-notifier/src/main/kotlin/org/ostelco/prime/appnotifier/FirebaseModule.kt +++ b/app-notifier/src/main/kotlin/org/ostelco/prime/appnotifier/FirebaseModule.kt @@ -17,7 +17,6 @@ class FirebaseModule : PrimeModule { @JsonProperty("config") fun setConfig(config: FirebaseConfig) { - println("Config set for AppNotifier") setupFirebaseApp(config.databaseName, config.configFile) } @@ -26,7 +25,6 @@ class FirebaseModule : PrimeModule { configFile: String) { try { - println("Setting up Firebase for FirebaseAppNotifier. databaseName : $databaseName , configFile : $configFile ") val credentials: GoogleCredentials = if (Files.exists(Paths.get(configFile))) { FileInputStream(configFile).use { serviceAccount -> GoogleCredentials.fromStream(serviceAccount) } } else { diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt index 8795b4970..b1d3f7105 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt @@ -10,6 +10,7 @@ import io.dropwizard.auth.CachingAuthenticator import io.dropwizard.auth.oauth.OAuthCredentialAuthFilter.Builder import io.dropwizard.client.JerseyClientBuilder import io.dropwizard.setup.Environment +import org.eclipse.jetty.servlets.CrossOriginFilter import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.auth.OAuthAuthenticator import org.ostelco.prime.client.api.metrics.reportMetricsAtStartUp @@ -28,8 +29,11 @@ import org.ostelco.prime.module.PrimeModule import org.ostelco.prime.module.getResource import org.ostelco.prime.ocs.OcsSubscriberService import org.ostelco.prime.storage.ClientDataSource +import java.util.* +import javax.servlet.DispatcherType import 
javax.ws.rs.client.Client + /** * Provides API for client. * @@ -45,6 +49,16 @@ class ClientApiModule : PrimeModule { override fun init(env: Environment) { + // Allow CORS + val corsFilterRegistration = env.servlets().addFilter("CORS", CrossOriginFilter::class.java) + // Configure CORS parameters + corsFilterRegistration.setInitParameter("allowedOrigins", "*") + corsFilterRegistration.setInitParameter("allowedHeaders", + "Cache-Control,If-Modified-Since,Pragma,Content-Type,Authorization,X-Requested-With,Content-Length,Accept,Origin") + corsFilterRegistration.setInitParameter("allowedMethods", "OPTIONS,GET,PUT,POST,DELETE,HEAD") + corsFilterRegistration.addMappingForUrlPatterns(EnumSet.allOf(DispatcherType::class.java), true, "/*") + + val dao = SubscriberDAOImpl(storage, ocsSubscriberService) val jerseyEnv = env.jersey() diff --git a/docker-compose.override.yaml b/docker-compose.override.yaml index 9d810bf56..f95c8f439 100644 --- a/docker-compose.override.yaml +++ b/docker-compose.override.yaml @@ -93,6 +93,7 @@ services: environment: - PRIME_SOCKET=prime:8080 - STRIPE_API_KEY=${STRIPE_API_KEY} + - GOOGLE_APPLICATION_CREDENTIALS=/secret/pantel-prod.json networks: net: ipv4_address: 172.16.238.2 diff --git a/docs/NEO4J.md b/docs/NEO4J.md index bf7c42976..881d7ba3c 100644 --- a/docs/NEO4J.md +++ b/docs/NEO4J.md @@ -32,7 +32,7 @@ kubectl config get-contexts If name of the cluster, where neo4j is deployed, is `private-cluster`, then change `kubectl config`. 
```bash -kubectl config set-context $(kubectl config get-contexts --output name | grep private-cluster) +kubectl config use-context $(kubectl config get-contexts --output name | grep private-cluster) ``` ### Port forward from neo4j pods diff --git a/docs/TEST.md b/docs/TEST.md index 4b93d4df8..b24b9098f 100644 --- a/docs/TEST.md +++ b/docs/TEST.md @@ -29,7 +29,7 @@ openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout ./nginx.key -out ./n cp nginx.crt ../../ocsgw/config/metrics.crt ``` -### Test ext-pgw -- ocsgw -- prime --firebase +### Test acceptance-tests ```bash gradle clean build @@ -40,12 +40,6 @@ docker-compose up --build --abort-on-container-exit ```bash gradle prime:integration -``` - - * Test pubsub -- pseudonymiser(--datastore) -- pubsub - -```bash -docker-compose up --build -f docker-compose.yaml -f docker-compose.pseu.yaml --abort-on-container-exit ``` ## Configuring emulators diff --git a/docs/test-deployment/deployment.png b/docs/test-deployment/deployment.png index 964ec59dbee421779f2ab10fb7ac0d480bfcfc94..4e2d59309001636000a433a3ce040e2b9f5f45cd 100644 GIT binary patch literal 15671 zcma*O1yq$y)CP(oN`pv9cY`PmhekrW5fBihI}aeGgh+QB;1JT?Ag!V_heo=)yWcip?zbuE^h_wdf_nb~{Kex7+ISVc(&3!MZV2?+`7jqGbRB%}v(;D-eDA$T$; z(~S!LV|I~x>+-?g!NbPX%mqot)Xvn&$i>uz%GiU-!o|fwkb}d)#>mda)z;?42YcJ6 zF9paz3!gq}ymk3MpCci;r+TZ&%eRT)ws0(qVx}EO#r*!;@T8ztv$*83dMFjmlu1}< zSFR01Jk6~eV}X>=x+&$6@M?<9!7v}3b{n(k7;6UG%l#om{}6s+wr$AE zw&eNJ3c?w63{6XTt6QEbo}1^kLhugra5hnc^z$^&#o=dZ=RK%icP`c_=$u*dbS`&j zN_F8$cXHJMBAsH7*)D1=iEZiHj~d;7h`C+S+k6gxX?)$-8}_1t@zSDdSM&8d3;DBQ zwBmu$+)CWjZ&Y#3^CoA%h@bPJyzao2b7j(GGI}XzHh_2fd)HLxbAuI`auh5%RfqdG zwx@Bib^|=d^aD5aka8Or{AtQ1=zK)01#uJLk|d75FL%KhRM>c<72Jz2~HDLxd(sOs*w zIrBcy1^HKly!PjzOk%8$2|8S@QMCCMF&5-1I9_0T*C8Q6gx|at*KnWQfup@sytwMf zlju~cG+~_@CpMuBh^gd0wT+^`YWdSI*pK|9pZz{(t?n zjR_v#e>YAHe!2gY5e8Ly^X7nn^6=@ z-yN4sdlE7&9w4cim|wD9rf{1h$mr?mUnGVOp@7b50&;TbOEw3yWNvP5Bqb#U$dq(- zN#sa?O^LC?dCFx5&6lG^+61Y}($Zg=nwk<468!y<(FBErgkHURN{@vUMF=S>Dyq_6 
zSYBRkfbIyIBU0Db*PY7Hx!UCFGx&V2&k5Oe-@bhtm4S?m%yt75fjwNL2dJ7RRN>NA?N| z3RkB)mZ-3X_MOzYxYrv^5fO;K6h3BeY;5fIE-a)^ag(s}+FCOumnQcEzgu=DrkC8@ z+k;s_etr^rv(1VM3gcHy=olEvsr-X^%ITLUTW;Iqj>EZ%UeH{D>n0+uPgT-aa=+ z$;gLt+5(ZZrU0s(&k7eQ2b?WK1n>F9E3yp zZ=}^zFe+6}oRTKbGbOM*moTBTm7c^A!$#;%KtRCR-W->xu*YHcZ!Xg8llE}ZDx&9E z-@aM6gRwFb=lQ8#r~3MK z$|&K5yHj->B{)iUNJ!Z;h&Up3r~-1^uU~9zZ25UIkrdlywo|n^)TO1Ry~>4!h4hm; z%F2S*Iy$)8_NMD4C3S0U>SLei{>Ity+MgGfOAikZ|K|?P&=+iM@0#3SVG+MF zesn`hLLzrV*AW+of64daMMV93Sv0nzXHTC#Rb7OI&-vcLKXP+(GcB3*hjey#6Sl&1 z%i*-Nv>Sgfj@FizA0vrvZVP>A$E2mrI@$VrwlkG@YdhN{(3MDR1OkZrwTw(hM~94_ zUdknK7{rx&;tTy3hAm#eo4^Vk6!P-$)aW-tM|`)(UzuHZ#WD`Jf|Z3huVRL`dS5}4 z*9;}51OC=KuciCm65r3Rnz{KGLD$XW-5Jk`O4IEP)Wxm8W!WDD1O!$;&VQ2%jfst& zNSEEJu^KgwF;-Mm6dC>K;h{RKsQyYD+fqVUSXdW5HZrym4u=O;9F3LeS)w4)ghOLV zk&rAas-jd%<%n6G8&N1dJUj*m2SKY6h{E#nD6@DNjQM&g)6>&qW4te4zU1X4%)tiVSf>%NYK;`AuMOqMyY0=MoSsI6g#0Qn2ifP{(fQTY zaB`V$P4}-~zyAE8=jO)GOM8G+?%db5;*`Q;otvBcMZF&h39Y&+D~opGV!11}#rSJO zLqk*3b*^FxHW|Oe=19R*oxP{K`}sp8F@iRsebpK$1IOJXkl5HXmc9JTp-?9$V*~qT z#xL$*6v@L3@0GEU@+p%3nrH>V|C}{(At7mqhH=@?wUCpNf?SuBkf1fz_~$(^FrNjI zF6_w*7Mz78giQbb=^6=xDE&>5J*uFK1oWbS*n-E{#2jQhd;7il`S*A(I(vlvaO>w* z_HI5TVc75}eLq7~-SFoV9>Do)YisE&x-)t%GdQ=|@Sr}`3q#}wCmYr8fA*S+774(GY&Pa3;pE9Zp$;5q<6a*iXioi$|j(= z$>t2ko0%WMgFpcN&&#Qa4`8gK?1b`Hea41BY6D;0{-M$-p5KBjDJch zdHU8z2Hy2**4qDj7B=}dCZr8P<4r#;{}XGi5+pWx!K9k111Y!(z9or&?ckyf#O{%y?9VoOFj zvex?dL_C~A1$6~&Yr^}`R_8ip8|9`SRaPbImi_oa+7W5LIbBb_g9nxu;?Fq+6XNu8 z7iB?JdsFkq=)!4cFzd-!Mtmx~e}ExTviq0K!wHY;b83awz`&>M>w~mh_v6mMg!&w{ zCxlQ@&0D35=wF`gfj~A|P(O5yq+mJO>?+EyuBjAcc%Sk4vq1`PNQo|wsKa{#NKlEM zFUY5-&7O=F2zFg=oq8u}J3Cw~Q4c5i;oNY1{LPciQIDAh0qy{&xB4>`+ydbqyVFW7 z0_RsLL$_J_5PI4ySJ8LPcz<8ZLK1lef|i;bGI?zuVu||BX8cxU>O&Y9BwC2$P`vwa ztKZ-+^&Th@;dxhT;71epQ{rYTkR>c9~f0N#eXmsPCuW_uWmWi~>s6@r7 zGxMbh?(FKKfYe6gukQNXuE-Y+JswhMaNUw6sjLeG*QK~{@rJ}^Onz! 
zj4p7rR6kU8>gp^)P%sO4&FY$4APl){cYA`o%y4e1PTA0)MM-zk8;T7w+xmohyw6Od z8%l|hOLgq0QC3^UfNE#U*fL!n=f&R(s;Z}S6XyMPiQm!aCj}r^2O|X^@Ci}~@r6u% z)U+v!^Z4QkSWA%Ew}MDn=NTXT)YjC@MW#h>`6T)1-U>rspH6r5qANG@w99Nl(t`nlT^EFFvxdia8svz}a9+-JZ$d0Q@daG#dX}@2 z?&f&$kN#$>yCdJ;`NPLC!LmjQYYUErb`EHv`f3gg9^m&KA>g!{CguzNI3SYCtH#H{V!bl0%u}{E$1~qt zTofYZ)p%R0ka$s}1fK+can8Uj5`|rAIiJi^6~f`CjH{j-wmVwHHQZ|a$G+{$iRqVBJRheJ3daX|3phUgvx>US2|lz}FXB@RO&n6Kq|sz9d@(X|)r8DkVm*{iUE{Gi*W@8S)8J}j8;*gVrWQd+ z@-eUaGpHu4CaVWa^nzrQnVBE_{K=;y&cwoNy8EWQjxSSS81d<1qdUI2+GUbb#n+|! z*-G|WKdIO4wo=tMmY)#}VsA}#pHY{gC-4fCum>$EsLhre?S6fF6Jqa&1Oe+mXf>P` zd~xP`r#Duh4)!NUUQNN&O_@`%yuG5IB zQs4Bt)}mPDMXut}%E}fK(+I6mG|^je6{vt&Z-#U@KBx#!PV&~)M2&pp+G-=-o84&~(Ehlo0oSY&>bIdP)9fe@RAr)udwnvxz~spU&MO zy5C{P@$r*C<`kq`;#J0%%zGcMn1f3@Hwg@5IbG?MlrW6@mEvmAOx+(Z%hai9bPFz7 z;)6k#db~a=DwD{yU*9jL`UIyJ(jlxlgHQ@o8tfx`g3Sa8A|D7p7}a+9&vA!?&Aha6 z|Jy6TR(GFyKkfBCZn+Hkf9x_yUMB?q{h6`v|86Np=AfBR@XroJX!Kd3&OBw60&{Wm z-{al?JpZ>xJ#CQvicLQC^WJaXDefE4xp1|3xy_-0EwLB+T#Ys^K*{%vJWw^^s7`C} zdQV}fbr||>Zbb!g zaR4_z-Q|7l!Afo3x*aKz3Y<>X4O*M(7fGtcGJOEn{x+Sl2j`-&Y;|>Y)Z{QNac|7t z=U7t){|SJ$=St(XTxnLJ7@U>XUW#gMb5kr>ZPd+ce`|{Q zT;(RL=ld`a<-Iu-JDsSvqSfstDb3;*EInp-K#6gd3sved`065C$^o<&=8B3NS`&VT z&LF-_`G=hkG{Sm@;V0hg3Pz}Qcn}NgqW_YZYj|zQEbtAL~A7LpC6DvQ8+W3eXlD0qySoJBUb&2}B*)9Q+eieM%;-~-VN8-wzC zQsKX!7!6f*Ej%ebS9}}#1ogo-=Ig|`HGAgh>%~*q!rF%Xs4W2vj?HR%764g3GsPV6 zwzuqLD{IXTec#zLUjItX?&oiX1V*cC;;ZDa&3$r1;8_s<Q_i&Zy@tP_n7}9IM>uQB=V#Dp0uPV!4inaD7^qShQ?~RE2wf%0- z@I+FJQi|!hG5Nz(ja-6kb837rDjsQ7aibsE;d7J;ZG9#}k2oOs)yq_izz7vdt5qlD zwe1#B4=w?c!TB62+Npx7U2{FrfQO(yeeC|tcie5e5I9c2H~u$~29q_(Nk{Z(nMmba z`C4gv{A;h=U(pAnN?A5dJ9imcBmZmDyGg`&bD5zDIjMjRf?Jx1{8eGVmG+CYHe8S4O9{)lSc1o}z;m4`UNk zcgNgji~Ms@rRWd6*6Ocf`Dn4hTPxWwSv#!9hX(rca6`HSL1q(q|C!rY{4to{hmX5@ z&Qi*G-|pcw5mmFuhR%uoFvRsR56;hXuoov+Bi3{g5^>^kd$Xm?S}%zT~i^&WQKt4^hmf3E7A)8cOa z*UC7xM>0TyE@Wv1GO2Fnw1^AlSe}P?&zLN(MpWlx!nw7os;ZAjnskBPZ0h$FVJDBL z+5^Y|C@3iJUZlMY0MHSQ{>s@ew!8YwcQS$3ucpQoFibEXM#j9TDBMRSf=n!jZduQMB`}gnX#Kb$( 
z^|57>ajss#Byup?QepE}5vv#P$qXR$5WT%@j>|qbmxG-|90p|E76aY!tgz3N zqCPfrEy4u}4E8e(HHnGZ^&L;jt6Ws0(pFws3}W~n`N=HRIFDv#WmQ=YGvK3BQrgwm zA17%h#cDG^j!>$|D*AeQz!1m?g!jbw`050lSMezYsHns=HRCsM(9w;pdqN0UW!Dv2 z3kod3B=qo!e1hj-SnZ2sourovsV$ZT2M4R?Dp-yc>)hSm)Y(k@s!d11AWRi-{<0zJ zeRNlX`5f{PYp%vxJB8QQ=VFDZkPmFY06lI1mNRx(l#iN@j&6N(b4aMs zZEtsJNm3FM3yT0B-$M3psXjRwnH*UVtUfpxZRH)e5Ieh~oE(e8X{q^^GHN;3N^P7I zHYslyGfK;Q!btYO=$K(FQeGxzX4l>62?V0_JQash#A|1gXY-A+a_h&F%`{<88ylN- zLt)|OAe;*h@}Qugp3crFhWxwpgC)R@nSSB38IOX6FsoQ?a2PZ(b(|wX6KF!v(UDzv zMU(6Dsza5MlaonENL=`@FOJM@ZEeR8O~UlUfHzoHvv%qf4*iC>a7czTcAd&>kE7MV zz`&-ezOF7}9xDwH0%~e_1dGwMG8TynC!c-339sDVT!of(1wUgnF*WtQ+Tkl?2X5Sp zotAvM)Db!1X!W%hhB?RVF`(!oJPL@(a0 zj}$zE{3_HeO7bZbi~Gn$;_Bj3tX)QN(&T%WW@cw?O$w+?JUkjkMnLcqwPfuLwP(UO z3|rozhqBGGCOuM2eqm>Rs?iqDsy$g_&DaaWN)8G6vcY(#mXC~lceL$pVX?m2H`kgc z6{P4=E+HIAA^eS;_hdjR5!&um6rG-~j0~8~*-XhN1O)Mn%4q{X>g?wZ>09GSVpY0d z-47rzuPEX&Y>Z<^gJEH9O(qbJh=_3#nnl(eTkh~Qv^6^h1U9d0n^_SlQ6z8cD-65*G^P_&z0`vJFpA|TJCSegdglunr)R6!-K1IlbGq!ek_8SOYmjOxQR|LjlVn0O9 zhtM%L43(Apox*V5n-|On{`80x4@DoUdK4xZTo@LHnW7wTVP1#}=+wjg{ft}dzolsQ zYk<^L4Q^~~JYtXgOO+E&+Xu~UF0k&O0Ck;KDI9i$=B0gS(b_W&8wNqt*}3Ny7KLiT zvYCm;Sa(=5l_BmQK@Mq8BPh_TD~%sL!>M!I7z&IiN?I^7GSV6oUVUhPdFF0v zW|o`hEBb!!yCW@@iFU?s$LtBjYfvfa%VW*DJxMS#Z+U-sBd-ov#TWGSfrsjVx?b%| z;aV`Irrp}wQtt=s&seFx{O;K3C^&q;LPMh-&@*^>o$+`-|JRXX=RIEaW7tg)X<>*|mvlj+{M_P+c0 zyVtx|?o`*#61s}PFxSdGn*Amg7%IKQ%ECg}T9Q;OY%~6Hk}oeOM>W^R!XhapWovU& zuAS{b139RT+FJcpw4SA*(#dCH#EO$8o?z+bXb}-t+hI6Z_BX4sIeB15G`-w+78e)y z@@krQjERX!3iW~cs^@Bgw7?{CH$MJ$HszHZnM^V@hi9bj!Roo9ku)&nieu^c+cz2# zfO_4=x|WVs`&fscU28m$^Ge$X%sf3d$JTw?5?g$Sd|w)QA7E!jA^lfvv#2FYQ242E zih%PPz!-p($im5qr@Z?UkrDHziBdXTV8fALg8G$%9e?8qtf#M!b#7U^%)pd#|HA0h zd!;)b95{dl1B5kUbP!xTC;ECj9Mr_YIGjios|&~EMtVxr*!L_+Lbi-gwSCFS#28Ly zl+a;}8hKnz>*b=9-Tjw#Cf?VJ<3^k_**>*zBS~%+5w?JMY$)Syqx>G4@fGuCmw$Y zkdTx0xtvsbquv=525Ke9U(D$yFW9|Ocm_{J}hpBmuDbIeu20pDzTF9H5X678gV!W% zzA~c2wBp0`xE{r&a7CW|7gwFgJ`jo}nj_+m&4S1+!Vr$6!EH^~!COujXs 
zw3ZbJk$CV(T|;HPp!R%6fc3xE$OKhf)keMD4k#oZ1W*fa#)5gdOu_lDKWdM;p?98r z!7m#kRFD=jz3V$Nq^7SQ{`Y2pUzm-coOcU(`phF!hWA75iFb_w|9V_$1L#rH@W$kV zI^bcPf9kpYX-Q4YLqn>6GXujXaN1LzA>sGbeoC~5g0iw)1#eY1*B5*!$#l;{R8GG; zc3%+jEy3cqM`e*MpQZfS<8Y1wKw|vs&dH_OEG>MqH5&X3Km2TSfdYk4S5&co?|r*R zC^w0L?r^MpHatp^I{T9NBOc<+PjI8P!=v``UpZI3(~r36RA5cxuGC`YT#NU>bA|4* zFzZ7Hv(78Sr|2s64o4X4pSEw(YyV9^BJ8;}hnQZD zzW4Pn9$}!$r$XAzUj2S*`TZT}y}Ko3{~!Xecz*;b-zg;IntLpe^1I*tLCjHPrWdTz ztQ)R+A;uF$o+5j)qqqOxTA)NzoOHE>YlZQMm^~Y+ z`PJ%4A*=)AOjzF)-|>EQc%6!}&<&BJ@CuYovV5PzVNPbQug)G{?_-h6cglgXe13(Z zY3Aw3b2K07Q(MLrhTMN`UgHE#)Pwm9C>D;Gd~B6aZfj1lZYM(qR^b8y))_28B+>3w zQ9f!7)`){`Z;#s+gyWK=)awUA&3=vM1)sxM>6Lojq9YO&O?Hcyc~O3JSMdEJ7StZa zt^|3FYCAh0sq}wZ-(UUBz8Bqd5dCCa=i?4p?M_(Uq_J1d!>_Zi^rQF2HJ=9AQ{k0Y zaZd`|GcE!aQ4=BQ0k^C-DFxp9r7RFLzWM?Te6qee#f|CrHl@6y`*$ z^VOu=Y&_6hxjj&3aS0N-VtnPlCtRl9hXHR?Gg^C__XJwPklP$0{paB~WdkQcQ@jI% zpJ0L67NAP|7!}q92Rm;bSrOn8EeiX~4rE&b|4w)dVf-JS#*YQ%cPJ=SA0cJ~L4z@t zYt^U``bEL#(!#hh=+1}lKK5QZ^k4x$qwhn&Qu%{UV^!-vKc~D9_LP+I(Mex?iaxI6 z)cL(RAfV0jO;5+$(Vrs$8 zKI>r9_R(cH;^5<$E4*cD$|ZvmvKtpS?_F~z-JGqY$?qZ)0%9j)7O87B_;w*f>Pgb= zNa9{Tfft5PVEMfq>li;*xnq=9wnEu^F;2x+ zUX`!%du8YW`G++y)(G;vc_S7o;eTk8=hd3L+Pc{@dE%N3`X^7Hjq7O0dYxa}m1LT% z{cjYj=zq#q(@;s8CyKdTB;Q-vP4v7wPXGSXbHvFC<;9FSjU4D<-vVJ*7&kJ(Y?&J% zm05~}CYlnRc4B-cwe`4l$mknL$Y+f9ASW&iIY+qq500SkjuA&-_~$$&CB0hhBc1cj z^dWf&(WX92F64r)ZWV7vxL01Q=stQ9k0(n3qL}ioWljy+seiF>H|9~ZClK=O@e52& zuj})JS>Ya_>aetm*c~aDQO+4IQ1?_+Jh7eP1;=2KxqfigpyPby1Wv2s=n~R!M-7-+ zQT|h(c{w{{bQY#48#M`9?@{fkI=IP{&9$gZR0)IJMc_{&4hgrN$?<|%&JPA27lMG+ zPvVksKHJR!hZIS$n}ww##$i=atjvXD)^N!WoUtzJ8BJF4K4PEB%SgV! 
zp-IMN#XCdRP(ZYjO*|-|L0n`ypAlp4xUj8!8KC~KuO`WiDIqTYxnN_0LRc`i1)Q~) zB*;?=Fophz8rS&{bK3@9EX>$z^ zrZOs$ypey!%^emV&c17*TERsiMi7`G*XW)O(NwkE-rl~wCIu*>sY$410Ti~i6&1k| z5oSR|1Ox(tg1d9AzMxvGX3aHJS6AH?L~@U~C)~76#p&CdR`%Y&&>{hi4(4!e^iBsw5{@m;wKjKd7lH$r_%Y z{}mj?R2WQWQSf-{Z=`gK`@te;RQ9s>M)+`E{%vHmNm%UOdls z+2>5fDy&^JM+W2~G<|&9zMy~ns9t2dwc7u;6d-V0k{!=~Ruis<1bp8fg~@$rIf@Dl zL@_nM7n!WUxJy#;@)F9C=u76=nrjs$mr+m9m?jYv6pUq5ej(yDTBsQZrDtSRACd`L z({yvqPwx3fLtTP!rx5Wv2T*+^HZ9Eu5E;Urr{m+}*Kp{Du-AE%{qp|412Hl2B;lqg z5YSE6JHr+L4P%mPjL=m<_BdD!{B+=vIfGVGbx!Ew%3IBOu-x^G<*V}=RBLJsF47E7 za9{3xN<>67dEmfLwpyOizWr{J0B!ZS)qP^ZCTIa*j)K{jo}xe3C_RXUKOQH&`Y_w# zeRcm>J+G1(?*o=qDPWXbkUaNhg+)>Z=)y^OfDmf}&>GBz{pq4*Aw`lrYJ>!6mNkF4 z7#Yn+@>Szh80qP;C`D2yM`2v%Nh4Do9YIjcKa2v7i;q0+>gIfg$Hoxd-QTF=Xa*}S zg17mBZlt!Viq+Q30QjWzM8bYQ&sWc^qW6-l&j1yX>m=~eMY6`0oINz*yTgKn^v`O0 zGi(g2UPO*2O78RDJ%|L)Nc9|M>qlLv__#7MGOLDy(yKMWAb3rcx&4BIp6oqs2np%# z7rj#uRc*{E4rj=7G&VLaT)h4e9w${VsS7{I{%s48-bj^c58<0M_8=tHA!qzJR9N?=b_lGgJpy=CTl@v$a6uhgo zH+*NSC)4wdesgOJ99_pkh2liGf=~dA&?J%bVxn?%bflj&0gf7n1l=651n$zrn@K4D ztWv$%>mtU?>F(Bhyk2nW4OWuC)hma2F@_7%9YBb!0se14yw!eAWYtAZTDl!@ft!EJ zl!Hp+o(@-zU)KRy*X_yiF`KQIS#@Wptf}dWUY%XAr+8>FyZ|+ys=8WQV`}Vv!og!6 zF0QJIii5K=b1;#+yvb7F(wn+Rc;-i_1L4Xo)YER6 z+?~iUjkhp*M^M})mz9+@TxKXLm^x8qelJ1=Qc=}u5&*`vH8iddSLB0vjkxI;8Qm6t zqsg@-^<28RBbX$;z|lTO_9&t9WnzS6fjN*50InxTNMBnUNKSa@=mN$v-S_hpVBd0c z+ox+4V9|gas!zI9h6|qbaJ0Uxd(WX$X#x_6Uq||@57Ox(-YwVia&mI8ZMI!)l%iq8 z;h}5cnvc0I`cX1nbh-2YT_@!n33O6D_=~PG`t__>JTC>&DL73!5pW2Q?}5~8IL$Ef z8ptvQjeZ4u`}Pgb_SbZn6fvb~1h6q{T24<-PZkpTUPpuO-g>;Va-%k&0URD0Vp0b> zw(RVw(b0I!p48-de{7AeNnq0QB8XRm7B5({WCX*;cm?f_d=fD_MfxdU6Gwikh<8J| zTX3*PR*?_HexkCRB#j{ZVpz>W=bN#4mnTV%8cW}OP6@qTVF3SVXe2rz@S=6Ug%r=v)4EHyiZ+uA+=!u;am;#dDm;5)ykFk5_XbQkxB+RONjFS+GhCnq}A`Z0*0 znr)uE7R=Rb<2uVMeSjgYwVi@)jj;ovA~+&5xoTqtlFnwo+|L>_7oZg zlNmkLNTGyR$fa^N)mn$N7B#{_FD@3JYG6Ym$su3usWe+!DT`mSI$j?v1--l}EnTyy zK3c<~W6P%L(N1S{v&PACzYNp=qc`?zs$RjAQ5tK+p9h7 z;FyCeB4w)Hpjj~1F3A^%?5(LuwbCz*!)B{7N>>@_aF%>Y2>eWY8`j?vH}y|SXw|hd 
z(cvrTy3;tUZoAg*#(QUSf{;+!#m3I2DUkeE zSl6+q`@4*#vszodA-Xuan7?4N;v0zQ7G|rS zd6K?$i5rnkl9oy`+=HWZd_0vZ*1`&#b^-mm^t~QQPl@R3FNQ)MtJx;;JMwD=KEWxiSQ7(bKa%}M zQc%J3r27jCE_ngDNK@Ju`)Pjrk^Bd}{5^?VLmw-pbI;Yrs61FMe8#6N!g7j9j>z@4 zzGS77re4{safHExaQ4nFsvyHF%R**fI1Gefi?uEr)j%tiQ{x~8y>OO#B~BVYM+6Ow1fniv0|81qpj^u zG^;kk!GT|@#PVnrrPm@(7x?2~&lX|Xb(4>GWFNdB{R^sSz5A2^AD^sv=IrH(^8|iUNbI%zjmZ-^SVy?}Qc3_O zr`zu9Nzp3F$|}mgWrm4xG}rEKxv)Suynm!rpOtp$id<@El37)Fc;LC*jSHp(#wAG?m1ILMn2rmu>aaxRd@v*pU(P<1^G;KK*HhE)HBDkHsc~OWz)(ywt&K z=;kn-pccBBJL^_8SGpcR&?k^zBs$HWyO-gWp{t3Geg@|zqTL6lC2ey)7i$F;R#hgp zxf5!kxkAC-Z|%m4(P-3I*|s9qPu2Z7C+hv+U|2MZ6pHd!Z=sz)IS8{!tbBhN%z`^d zr=``w)CzzBWG59yy|6IGom+F_XaM1=cQ!M74;QR~rz!#2UgBf5auV+R^h6?{VBdss zb;z51(~h3@=uHxLDeiXh;_IJj^{w%~(yAuPrhr}poxaMEL8VCueCAECP&~)adgowM z?Q3i-5lGcdMFmBZ2NOMFgl@tN@?OlXP<});i{5S|lkT>`nj{DaMvHH0G1M%Xo!my1@Q(`x1<~)7 zCa&%4^cS>CnZ?E1yEF0=)FB~?L~KdFQ_C`{wPnmTEQfce>$7#L+IyL?1r@kn4t)4B zJjj>hUJS<}*PSZTUNiK)`7=8ka7)goWP7p+<6c`f@tv+xWq_#XWxjG*9}p?8qMNBw zt%v45`bzQ_Xc2#}r(uoPk3>Y2G`Lt&0wJD)B14Xjs$CBuP~!fNWN=6XR!@Hf#DHWG zTyH$uzSPr7AAcxA2Ztio6Pzqvyo~-r*WOJjCB@iSR^C;KvQa>Uz7k8VDHhu z98B-^dTK^BOiyP-;bLH5(5|8zIqWN1xo?-^Lrc42wFz40>O;1Bx!ZFUNkO+?@rvW( zkY*dpUvK&V)~M+$6XvSSL}wW10- z($a`XdDJpKfn!VwX=$@tFZNRd>~Wn(J-YbY3Opvmxfh^T0pjG*B1UEP;Q1;)Q5zJE zsi_)u65)^YGmSE#ca!zbkZu%a(>MnR7dd?`rD#LKImr_z?m1=dfO>~_#xezbC6RS! 
z^p>6^QUjkI{-j}E?*F7`>J*~>`Gh)t+plywv06$7a}Wt5iW>I{(~o*l^a zmL1;y%zqS?6V!o%j4Uizyf>T|IzC?iSX9i?(&Klh1$oX;Ztm4%%yp&4gOH{&nTHxQ zm4@DdaQc=EsXKPrUxwb9OGvBCJId+rE+Pbc?(~Bq&QU~9*(IQ*KehWNCrSF@kmx2M zo40dVq^k@JmvZ%TblW!wu`qPqm~Dj_aD#)%d$vwFy&nNmY>dNi2DgN!vX+oWxYT}8 zS$WlOqUwMNm`7B&fG1)c79+}4B^no(jNyZ+hQAZGhaPf!A#-q9N`+hImmmbeAs$L0 zmTm36#p>%AaY^T&sd@m(8vKP|vgoR+DlFtY!X=D_bo~Af7jDdw3>uS9RC-p&MXGi; zs{K@OKK^T2%fkZ}3$!X{<8SuhxET}c?>^k^^r%t7`I(nGc`3RC{dbnHMSkB|&Od-e zqMUnp-1L?KLi&82$0q-WS9{7|#NgoE+~iN8099RpE*vp2G51Hc^Ox_U%R$qxSS+GT z2*4c=6PBOpAC1zJZuWov>De0Uj^|3^cvRUMLBjK?%@G^BMZk3vh$)DFDoYqNeGf`- z2NzYmNKsgr*CyTsXvir^%6mxm^!Mkny;Zg^eA$oQ-Q8W7Uk&Fpby11(I4}fPI)t28 z-B&%S9yUx=F@o!0{Uamz&@XcL$~GurK-#PXO5n#{%aFod3ubWcNDhJ894I~`fFwej zC4NIzok9X8>dks2eo2J*5q9W`JC_=^&YmAg`ka6D_(N$zDc}?ZZk7S@!}_F7Q>!4g z4pds!F&Yhg`P7c!~?CfJ;%E3zGl~U54 zkV$P@l$?Z3gSA;qCf-vPj5q(wDiK+Y#%zsOgvSnV03xVQU8g=iHFdhuv`4nj4P++h zPBIJz1NVwrTSbG~o}xjH@SZjd^Az9Hu+T|x19-#Ie8yJXEJp!1?Qs+34mz|%VpO;r_d7BK- zHrDU0t>sZ1B#Sc$a{ssG5qBL~ga2Co)z&5~B&1blP*Obow4>IhpmUm?k;4$2y;C@^TI|j?A76VQgYfY3x-}8p z%%kQ`_1T%bGj|f?dIPS)xw^W7!qn`7PoZ03m7a&|HV0*J(IOKlQbF&N#6ydgWwJr+ zv&JYTFH3k!glr!^({ape?~ypXcY{V#zOcau=Y>L;!ikYTBKs!!e9$SzDY8YgV$eEA5NXEV)Tw44G(_PGdbe z@SM;0e=C%1rZg670`4v8=n%EWbZsU{e+Xk+E^xlT70RMG7Vl19)v#k@Y`lnpgfuS` z#a=Xef4MXuPjEXmCg#`fGav&3^jGu!Wm7Rkx~Sol?DUZBXzNUFSe0j&&vZi9yc|66kVd;=f! zPEwjq#&)0GtxZgwUPznRnm8CbnS3HMawju)a{A2A%KF*b(ALS>#+t?0&IW^xkLbmV zmq8wCnoj@w`xh_VQUX=}DEt*gZ?+rrP5As3PbMtG84cF2kX(%bv7^IHbFQ3c#HKPe ztK;U_RwuyRsOey6!1H`-&=igjK}xEETa)H%crZG_I@e5wWn0jnm0cQmHcE2n0GO>; z=i=|whN(B2R0YD83f*uvf_hGy_pa1ZKH>*{?a3#oE!b2`Ayly#MCE6m(oADw+&ITUUwWp*^@xOZ^`I?9&j#`%R=w{JA6cDZ9irw&$G>8uVcekEgIiGWzH(Str{Ys@wbk^-E-`_D!7>6S4V?r+IrBfa zVfqpo8QI$UwALAx%w@r0J;R3Kw)!`a;X|S52@D*ZMzJP43(L^wTm5Da=fgQ{KkvJ% z%afBjx6`cHrOeSh2wCuOx=7hEE&0#DXYUKHKbWn|GaXDax@+@;&XJ49nU@GcC1(5h zWk$coTFW061vxL?;Dyc1?@Wc%jo_VeFL3W9+?KH|=>eKJ|J-%U;b4Z{%xoL{!pL)~ zSj+qN0%Mi>T`W8r8t%MCnZC1w1HZ#Yu-}EllG}QCq{yQt{$2w7;2Za|4RF_0=A%#? 
zxPGd63e4)oco(**{LWcfSy&>`EA40^$f&3!>_(E1vrVhXLTr5ef`NE+)r{vbGJ0t` zoOZajc6QWwd<%grws;egTn*7kO$fFW~tdTG~I~R&h9uPgA9Jt4;t<#v(D9M zwsc=H0z1Kf7JCLZ3r+YYLpqY=Ly>xka`syX2M#F3bYW)Z1B7?6bmf+l4BorcLuo=f zIq$HeRR~3!FOL>=tXcIMsvI`^jOI_O^$xJ{@aSsF^cuhYm6w-~H9}y=em;3QJzC|_ zee`Ye!k81CO)(kbrcsXVpCTq(wnlvRE6 zWw_@oYX=0AP0Lf$uoAhiS{FWSkWPEh`3za_1ReTJ64ag){v@2SQM)5R5nNnY7r9YM zaWubP-Arc|T3`Hg_H=P`@!>414lj``?nBp7``ORh^ zirx0n^E9a26$3x2^s`ob$PoyBD1rDSR_HRLY`Rdh$THJ8&mH%6W51iA^4z3OFEF^5 zvo;Wj-qZTkQ#gAbZG&>m@PyB&Kt$oQ>+wcon$mE-{806}6XB8aw>|wMm9w*9r{ zO)qcCiC=vYW9(_tlv%=Ux)2_lDns~#H*HTCW3IYu0b9FVHTEXa-t4;o>whQ?v1ec)7Bw1;3Rk8y`aUP0V>|( zz5<8G#SZyjo5;FX>CXJjLU!yNm%R*SYP96&%Fu!QSaKo3;>|*QZzP$BjIO0!9Xo9! zdrPN2r(88s9hT_nWMUV{2c^(5HSL>(R$W80oU zYaoVE8!Psco~1b?UCH5Wv+C2+?pc^^L4`o=uTD-2BI>IyQs4WR?0Lw8y|Rjm)A*&V z&BiaC>5|$Y)jG;{XpyKm@oC1E&S;EF1fl40UUs$YL2qBBCXpgS+|H-fq=;Lbc4O{4 z1I83Hu_`MoF}}9?9*WAeJm;nRkUBMn(jTE=zr>qE2&Xf8H^$yG)rhqj5*jCu*x>C& zy*5tv@;f}MdDyKWJS4}AgQJmb6vZ%VfGP-@3dk+*w!@+#OHzz=J*s3%*dQZ9ku+-@ zxu!7_KZ66SOp;>y`;g|PJNCIduk0&9JDcN5@1Tu%TQ6(ekvWf2gABt&^1ZODY{O{` z9s3Vy*JC-28N0_M26ac2V7ncg?}UPV%v;?sa_Z?S>)DsSM?(6wRQp6HC^`0FwE-bf-gj>)iH3P$M}S*E!uO<3oYg>(#QbqgW*Fngaq1uBGY$8Vwy;;~sIY6)vZ7dBjY zm3lSfXEAUeBP3hhMP?PQ7st|CzFAO^leRZB`i74i3jc)hh?LTFsu+rHga=aN0gU}H zZ^y!>&E4jk@5hHcG{VPAWb7)Tf5;wt!)Q043BjGgmv8%po5#T=){<#~m`5YYO&gwGNr32q#$j{bQQk)}*T(SZfvSUE;QM_6qOf)Fi<{TEwIiBZ_vt+l1P zI(3PFgmxt=+Yn4N_D`t_l{Yf7!4Od@G2_g!Z`gLtU}qV4ESU<6I8uek39>cmgGjJl zi@UakLc^Y*nC-0|mTn?D1TdmuRp}4120FiqK4pF+|3IEi{>pjjUb^Qw8Fg?3zqvKC zmO%=W&SUzy!)9%LzRRe;LuR1ehDWJ?;p{%|bIPK-L?TUuN{0rILHDb9u}k!7b{H`+ zapkGO{sMU}*UfR{u?3S2Q{oQg(i%n?UeA;5<5q7yxy(mn@?d=^xc3K_)O|#>U2roY z1pv4hL5a_Vgh`{@?A)@?8Ez44W&Uu>X6^dsEdJ`EMR$0LsYNrI{SGH^kgaB;2aFQ1d=)_Q2)|GgX9Nif9PEH(syy6!Czzf*_xEc0C8o4z1E`K8g;r&2NFDN=2oJY-;uH1z!Ok%nyhV~_5 z?PTX;ej9w*)!M0!efWGKu3_1sNT?4&D5g*xM+CYaxb+wT@9v#A92_+F_?i8F8)0t= zAss{1@JFmURPWll*2a@!GxRJ>t{*o|Xj!6H^y@vCBg&$Onak+cyjdcATomx+>J3-k z z_X{ePvie>sg1u6PG3rHnK*-2!j>yT9>bjVg-n{Q~u^sW9YCOdto?})0DeYD*jrR5! 
z9U89)LiTH8S+vTr#%4bE9uB^@w;pU7|FWBbFFWMhO8bkSi4HN%ROGN<}#k|MohR*od@FGtj2?gsIS)kGlbGs!V zp##+POu3kcyz^ADsTY=Lx7KotX|3{FwzIFliFV)fSe&RS*oHu%Shhq(p{MK(BywWD zeysxjsjAv7H5JH9R>7++0>d5bW6%k|IB{J0dZ7pBz6c7lSP`GF_lE$DyRc}4K3*f` zj4Kt5u8wK~s0TB#O}J{;jHh2Em!9Y2#F7!j7Az;ifpX`h$Cf@hhZ;aBknq=|C5E&uUUN-YJacPO4ot;YnkUd^%9!?dwo))H|;8>_I z3FG+d#P4#TnJb^@ygwy&MNLDKo144T=&G0)%*(5lHx9CuAOI+MT+6^>arE%yG!6jP z^dP2=R50ZW2A}FK)%b!MgrnxOLt8s_><15y?fl05j)<=)0#|IsN)SKMw@0pad zI$FH%9_I$JnDQYlcSi!BxGlzhMMXs&&exI(xO|Uyz6A2tiBAq+IWpK`6&E z*BYH{(4Y5J!xys-zUh)}Dr?PQDHa)Mplm_=L{BVBgzLz?T==1=V;C?NfY`}i-73B_ zevLEX1ps$xX(_<{`i(9&YHDgKDz90O`T0TNP#i)L8^&Ip6t*~-V6>O*!wQJHhvJSp z5Fu8jU&1w{1=ZTw4X7pSJ~+xgC#+ngPE_f8bQ_xT=Y9Zu*%w^*L{r{CPFC9sRP&~a zG|Du~jZg@<5(He2*1IF6`T+PIOcT1xiPvY)t}?H-oO~NjKy5{BIjM)-bBH8XBe*3O%%CA)cH`V02~T|T8;IrPVJ+|4)Iu}QD-Q)2r3$y zqKwSSrXCy&@$$3dS>mGW7aoFy$znG@T);s76NdjI*a&kItXEfWr*^BJePQw$t9K>B zdkleeN4Jl}_jAH)y3D}U-d^mZxw*MQ8d|!|+3Pc5qY%KivF5WUq7@pVORSkghxF16_%)O5jotg;+l1zZT;)`DiGAc zBIoIa&p{(!a`eXc{fD2E2Trbt1We zgQjmj($xk>fvFf`Ig#(z26(aUTonQ#0Fv0Nv$L~c7sX|~yqbbN9wFPC0$YN;Cx9S2 z^?BZlBch?8NXgVYY%cXnEO67cl$ze)h?0_~CvjVyos~*_AvqM6v2H38;jacz=j5R` zKr?yyN2Kw=c9z83LdYVPI9B8|nxMGUu_lT{F}hS`8gt5}1} zqgOcZt7E`$(?}smegnk#@ABDZzd^l&R6v+3lL%l&>z`K8$5?K;mIDwl(Fw7zI{F*0 zkT-}WN%%AL4xz^WuDSb#pc9h4^*+Z94-WU614PIfq(sw}RK!$fED_c-93G}z2#Jsf z*T=RO0acchh$5;gD!&g84pI^mL(*Fxu98(r3JMA+5Q#({YZ(|30JOd}%m#cN*oRNC z6sfVUF5lqMO!GWd*qfjn@bJ}5J%LwqsHqC#-K!Y-hXC1{PD=z2)-~Dqx)s!T4M(qk z9l{jaKHV)ENTfoVjd$jMk1tjgR-Xje$5Jxj5Fi2xd8~ueP5OTZ{=!56NIWPCgim*O z_ZAEhrYMepFOBx=Z#VnmQqNk_X~tZ4$6!Rfy}Y&tlY=gd zyCbC1&h#WXxPg(&G|EU=kLR$*{^~cCJq1Q-3a5w#Gy6?Q>}RhyI@#vsWFv8(JJPTD zw|d3Yo_s9b!gO6a-x# zR)^)sBezfYM)DHZE`;r79q{y`wvR~!n}_i@KRq>&NAaSpdLPFJA+~8z-dhXAGDJ{^ zRxOzlK@E+KzITVUAfy)R9k)SFb(2d+X?yX5%gqw_0UWgH8Hu4dA`W89-!E=BgyRSI zvb7+$jaV>Bgw%|*Y|U4ip$&P>{5IU5Dw(e^k$d+J(QdbrVYdi6J&j`Ul;UAejJqV>UN@vZC!M6|nd5hg@xqzLykVFN_5Je_Y-RR$pu_R=g=D*#?` z{!_S!Lcg$SFz@(hgAjYeNv@Fi!As-~^BCIgZ*-KwE`E)aO>^l6jUVt&xqK9bNz|LF 
zAI|&n^u=+-G@w7bjNg!vEBtzf0I(6|U0? z3j6`=>D>CgVeyv#T)!tH61lH(UvG1*>L#^9_*?$T!c{eSe6VmOSiTQ;3k4BHsz=v@ zvWD&By_bx64Ld32ncq2kXcf5g1Sa1&vl?-_B-(dQY7XLJV)|1Kg3?(F{hE+4jI+Bn zL#>~x^#D2YFS3Kg>C{a>J~_Ed&rjv?_CnW?qL>_#w!YB=7BE0F&TxS@N6>*MkjuDs}c@zw0wo~@H-GPV!#~mnH^n`q@)-aFK6RpVvP3k zI`bjHw$HcBn{oA;A9Z-y3{S@JMJfcyP~gg36)`|t(*dXviM$-ZR&;o396Y=_+r@_b z{QQoNj)@616AFL89H`Yi19&OoYY!vOuGYKlf(-%bn4M&KuKaKHtE8I$es0_LJ9sI3 zUZP$7?lz^EBU&sU(qr%pX{^^?59~eP0n^Aw@|znN7$f9aG*W7mGy=evqaW^p^+7b4 zZ;X+ZlK}g22sU^=Z7iUF$A7NYUJ>Zs;tO3t2+15@(I4(POx&HSOS_ud1F;7bfVO}` z=4>Jl%C~1@c&|KP!FS4l86hA8gkE26b0D+Fom3gm?>kw|*QCeXOlCqyPDAl&xd4*c7u@P-^ECQE#pQ5(1W1HYXDc{bEL$1S;1N%o|nh6PJO2cFoZ7tJ8LN5BZzcO0KR4lJrUKd zh;1>OaFKV6*2PDMKnT+tDR4S|A7J)-;)yApVv%02_odqa!zt1-GYRLaUjuGdsgOqx z))axDeesM!G6NEVUe40$K>}C&4}P>h+zqU~ALIGAHRwR(TK9gagtuO%QV+qIX;0Z5Lbhz8Hu!qv3VVtbOyx2VtMf%|_x)C|Y6^VL(i_)IId|{X_f=EcqQvKu2ggx{Ez?kW4vHmnAj!LuY`3iQ&v{8HpwJd_~9) zLh0d;ebv=I$InQeS3H=ejgD~h&}<;S0=^tR-+D}gpX`9Ap*m`r$%&ogWyNuxj{)W0 z#Miv0`N#y&-0CHU^#M1uI{669*W?_Dj^h8*C*z3D z#fbO1U)ye`B;Lf?2IaRKwl0Yml zLeB;@#1}zQ^cQF6eD}ke#lxCO^#8ue?Gg}iy9+r18ynY%ZNy^Gl9H6vz!>>(cWo^2 zOzL`^@*#+dn^mZeQ4FT2<#9FKz`YWN>2GY`S-QB)$?N?b+ z2C%TGJCj8c>s^fE}ch`P3 z-jtxd2yw$H7s~gnPj|Mx5p%k;m~4!#7r5n9)uA^DK0iRi;JhVgc_VA9gA(1 z0E%wG^BZa3cwHMMq;zbvc{ZN$yEs3 zU|sb`A&NIPSi7qInCn9kTmFCLTFMIsl5ZFTpK7StGuKZ})=GeWQ*d2TGuv|F`*11K z=)z1Mq<3^CNUgIgvmNQIoT=@(Ij(>L{LT@sm_bgK_aaO zAL4tx=n6{WM8W-m)Ol(GC!@Cp{9vJr``z`PC|m`mSml647Lf71?r+LDCe*PCJ#2IH zHa?k|iMXAJ;_rO{OZ_b~Uz?$G==b$9n2i~`Kz7re-2p}(!_5|Qzg$zSOj{EJ7Il3T zety9;Rv0&YxsPBuYKkE-#H@3RMZZ=KK)zf&OF>+Ev$-&@sP`3D(n4uRP{O#Rx3@Q~ zd_o3tHFjWOTJkPEpP`}QzYZQdJ3AbjHrX(g7&La<8z{Wu6Lo(Pq`iZp)|$W1-gNRgoJbi5k3Zn z{q|7$a`|AdC%n^KmBo5jxM~%y>UeU1P;8#8sp)F+JIlRYMC_)$sS;D6KeWh@))Vn+ zK*g<>nzWi$rC9FrA?B#YSs}Ja+A#?U2{AEWlNh$xLkV)wypzx#79r=`mk(7~|6Pg0 z;4~lze2!c9LQ5Q)y}J%vN-vpq)cML5OWaL3xWY`{R8o*wUH zWo2(#H!8|raHX#)4F7O+=zrKq(fa)5(>3RH!4rx$`I}M6Hwr1l*w}$d43nlNAIANK z2d4q_2JOP#yF@!gN1-?4sty}T!ycM_yAD+v{}wmm0{9X3d^iF3*9wiG*3&LRVt%K# 
zE@@J4wPue?V2{wxVlFN<_j$qxBa~ZMyMUovP8HMFpb_zc2?YT2QSsPBtyy(yipBh4 zA#zV@2fqd1b2uW3kaAgFRg~EoUs`DPf5p0>({uK z{E8-Wk3PN(bc|S5gBJA??da&}KCerqg#G%&H1X>R(8%U6>1(<_>(7>s{0U2YW3<%d zZgiJy*W!LyJ>AsQ1PCOpF8yn=`+n9|FTm~%+&8f=OsuAg#kxWJxH%yKFB;Y%)ARbM zrxKtwBX5~QCuf3a2%s_#4-Xi&14du27``OhHal#FZcG75tU>k%4TWy$Az4eK({2{( z)6IJ1s5PL6N1$E2=(-#ts#-1)nV|ZsJA&BJ#ifg7CXL%lJzdya(k@cn^L$bpUFE|%Np{&Pq2f=sa=cRcHXa(K8a#|>MPNy{Vg!|DPu-nNC%rqeEsFi4A!^EYeq@4ds*+g;%LlUAx5SjlcP?yze?3N=Oj;6~h|7(() zS+n#Ky+#}d9(sCNCPS6;QKGH^>nw-P-=>2Ec6N5<4{C$7)A++e*SeoS|GpT1@$4n* z1wfd{&b~RUUG7lyPC5=E^)Uw-J6jNlxVZ`^owx>7HuLJ%-5nk9gSe!mkpsz~zMUa} z02_ofj$h`C7Vx5EG$VeD(78UC%^ofpaRIR)uc#R0f(8#C&!!gupts`WO-%VW|$Fx!^45!)sd<)bLWl1`&=q)fnaZ?Xco7XR21{Y0GZutW3 z7{s4$ZkjTLQ5X~5q|Rr`jX|UnlaQoEMUnkvLy|vFg} zEykuRTMkP%aJfLFsMq2JR4*-U`N;xER9td$vL>c>xl!P0(lKZxoRDoTQ0R&uIeA;0 zoKYlV9vw+KavB+ZFhV7;&k0bwWJ7^@9gdIdtO=F z>l^|xJv%#FF-@>QCKprSBA8DKtZ^E|c#gS;H>vfV zJBMDX`pUtXoG<*`(N`XDDpD(;z##_%3e)p;tq*^ZSjmLmQmz~5fB085=9=8k!5CCn zW;3W{U?y^$es>9^N=3!t?Q~xsAVs6Et+AsTbCF(j=-4#}4uD#r)#Bxj?8C>9w=Zv8 zxHS&Kb-aY`gA4?WHa$zpAdS-5V6aTPC{cmBWdV!{DyhO?{Eus43^yWF?2@tJ;bECU zvmg@oJQ-D0Rq||Qk<1x3ZgC0p)sa6@wv&Y){D0eyX2EPJ{Ey1ItQPQyfcm&FKHfgJ zyej(0ix`okI*j^L< zR;F8@h5R4@Pz$E;EhU;sl~_eZ#RPMuq-z54q?f4K5 z%B9+PyRosc2vQOK-|x@vAU_S?NF`r z?wIx`FoR&eJAcrAi%D}ql~bw;aZ`PmzqKdI*F?I(LvFdx(-1Z=E{oB1b4Mi zr)qx2(TGv0c~si9KAn@3IbiUNgycxC@iuE&mA0}uad!%z)AYH@FZ(_{WQh3#&}Dib zLK+b81*{ef}y~~#f_JXH<~P0ugK3|sdwzoZ!fVCd1_+kgLvXaL_F~edez*xpMCnp zUv9{G!kVGXZKEy1Za-P9l?_-Ra%O>oF|Fwz8cRVi`|{?oUjg3}&5mJ_1(I#?++LBy z*BV}p=nCIQkZwmM(alrHW`iDLrej1`oQ1B(&X8~XMgBD83RCB1?>SpRJxVp45;8Qj z|H^W@RHfM?N90jPjkcy7mRp!MTWhWN@(BxEp7L zM^@G<+yLS~Bw;&)bVmWV;lB`-7b{Acgyx)MB1gIb?Uxl%9pinW?5g9#A!U>NX1KjI z1+9QK(a-lK^U2_w{oGa2yX!J_+Df=eYs~7`8b)Y9`iMA}-#Hlu;)6| z7!st}`wVT`Ru5x>x=PPz?|hvZepHYB@jCe>B>%+s-{hpp)?56XV-8K<9Z6i3ef(7Nd<*q@8NIpB`OPRgXNsJ74@bJ965eT9`gz(Y*=ePyB?c z{fkgYd!=f9-YX@Nz}6B&YnVy@rata4aXQM(di^oJ_USTdpGYrBaRzuH7;4fjQa8yH z1e}|#`eQ%>Pgh&U$+G8DgM%5 
zzZdUk=W~N>Hk{>4M?NB9YnJJUo2AjKWWUWB8A*hNYP~TUIUkNMQp!B|B}*#D_Z@%y zcwEubW{!)HFIzMS#Zz%X*1sM;*Qqm9NVV06*{K^I0WE#8`Hpagm>!o2FRzE3+z?+4 zprXyZ_jlW>ii+Eb`S0_##%nz-_qLwyL`3e9;BjqaA6R?dFF~!ZwRHOV+}j7<@VT7` z76$HJu?;+))i-xygoTX~@jPZ>z=DT@XK#~KOn zxQrH+O_izb@*SM5h@EwlUV9zYbAY-;aE8%)fx^x|NDvqqgTQzcHsos@{ZmBk49qzE z$hW@bO8+$0kg7e|>qjT4F;Z?EHk$p!N>0D8rCKylr_`#Z}`;jup|mXh%$d>6UwvwWi|NZ!~T#X01w=Iw>CPDwH~K&PzMJF>$n zC@%kZ`vsE3z+mktEZJhfmBtg2!w>D1Q~?nn?Bwd!kKr*$V!?)p`8R~vR8gluM^O6^ z+?b(A?h2S4s8r^P+xJbroea zXsXkjS!@&uIpMa-1^u=*|Cz3+NLyt|e3#ONi^Q3S*~EMuZ=OX4Xi9q^VJofj`UYcv?-TJYKQa3BS5*j95mdYpn9Xlm zr{RRYS5GP)vLjxQO)^@ce!mz#r|54uo_Z4}CuZ}a_r!Q?kb=#`lGfmf>@W8|!q%V^ z=g9uRWp@$97IVCVG=B@RQd47NI5bQ5riPamGg z$6x=XbY6hMfG%NXw8l_A5p=lhGoPuDJ4~d1E(WF{ubMPn#OEc@cx@L%F#=tJ>C4vs zpnCg(c=lINy7$^IQ9o{7*0;`n$8D`C-CzN`PPa8>q_bSjnJ;uN&Cb3o!xu}h`dS-( z{`#2KQ2ybl(bpu|hsILXN*KB^J7{k_50ffM5YGEa+(jR$SaY<<`)-T3*W=PM_#D?% z^L+On>?=O6*}>Pu**He~u}tSA^Q?$ZL+^7AQCJ2~t}njw?LEoa>OJ}l*H!&S+@HTL z>5J>9r7mICZ`xUE?$7xQJrtMRLV#LgP?f)Zvg-XSc4^7CbSP`>kGGaqy`4^5roAmO zKiBY3V=wYUfuHm{;$hXPS`?Q0xIdp^&=;Mo4=AK=Doijje75APhK8tgG>Y~o9^Q@}E*+dl*kSeR)X?N#kO8isX|G0C z*3Kh1)Z%&{p_kv>E#nxlJ0#8$I{6EZPB_tL@|72PY?9hW5O}PnL?wHRa1Zv&g5sF8 zbT2aZ*s*Cj?Xy}FD1rh9#}c8PhVLPMV`AYVnAHW`>3)k65@tB`Yo}-Z&g|3CV?|Fe zy`P5wdJ%sLTrwCHN4649eJb{&gWd+s=5#F!ZBvv1t!;@_LANL-`9}iy0rnMoG_-3) z;a&P=sa4(BJ&=Xslk5284Nyc}nB?`tE6+RpG||6`cC4}$0TMw8H5wvnE8eY`b}u`e zX4f|?W{A2$&luh6EF8<}Rc^oTi?zBVgzmH6rJtHATT`m8Slh>Lma1sr8DO{TB^{E<5miWqc+=ACc8dd7}To}g&F z)A=3$jvqf0A}{pMRI7_YSIFM8dQa{?_Z%vdmPNv@Ectm=z&v6|Um4SO*$pWqr+|lO<{! 
zagHE@y!0{RXTeFlI@Af#x}2+4-Q3RjS5$hgAG~Vqj)Q!22;WejUmq)%>h4+#rG~Ic z0T-lxzSKgge)#nn!(^*3E`77p>KLBm0!4MtgO`J9Bvh`YFto&Xh03?9+jl;~Vadi; zEds7Se+O2Q?YcsxglFtp59yS%W}?U!rI>O{QUzK|XQYUOmr(p(zC12M%gp}_kndEP zft+==j`e6(=8Uih7uHkfP-^`T3*;Pyx8}mC!;&_ZgJuIqKz8lm3(k25Y0Y4BN9t%* zs+a%@bnZhE+~O0fPI@N;>op+t9%RB0QvQN5NZjl_JrzdI3R}P$xJm|GK$=@;fl;xTiy233av`FA4j{wD{3Kd zrq3TPJ%lH5H*acK*e%|cs};NkxhXX@WMQKu&33SS$gvS|ecwCr@Y9?IoXEjkA4z!=*T|shSlVpmxh4S6dUyO3zQoDtsHifg9Zgt>WjeOzf^y;Cqvf$=xjTly+fe6y zwos2Z@C%paer>Juu%}P>^3ht~DO^1=0a?=gN*$X(G? zN%FT_1ir_cQ?c(OCJAb?y8Oz{sFb=vz>O#gnzQ`kB9+2#Xh_lw_#7wG zsgJO;r=Z(s;<;=*kC6~Dv+txLn(xX~M|L=+nN-e~hyN3g1xc(rPw3g>OgaFT4#r-e zYt0ksoI@bZQ?0B*KLv!eO#S^uZu12im^DLQcXlwUIl*RSMi5Na%3wqqb*70e zhbB=uw7(6*L7A_zI6J89)Ya3wfAEyClWuac!OgWdpMf73^7O-Tk*>YljQ2}?kN)OO%Qj(-lItBP~6iTD`hM-%!-r;qPy`(wP{e8DcVc5JsaL?QS zxTi^Sekc1qgNyN=yGxot$|Gvh(bHosHgTfUG13O{#Qbl=Rr?d0x7@z@|61N^0xE!Gfy#UkYq8f#cVcfYyAAWcc7Q z1T=pZ0bidmtfETJ(b+wudL5KT%5L7s9qULi*ZG!G+)tx;XY=6Td%5h6NVI-l#;BP# zgNkt+DC?r7^(`A#-5ZiLD8hYPTgX01iRh0m^7&(UBI(eWN_3J|8V(m_5S|WB zOLUT=*M-S^4NO9sAr0CoZONc(@12jV^+6VE3*N@=uAfCFKV1E6^pg1XOK8?!qwHN+VOFLcX7Q#gcv+ zFB(3G`FTp(pz8hlC+IOFEz1njwz^*&Ev7Gd6vYJvHM*TvczIVDv<|Vbu&k{LdYeio zu?pGoe4Mcs(MVu7zEL%pf6u5{X@bd3>RGf6Yw0E_sgy39N`?dmI(dyG(}6It$3>H9 z><+qxZZH`-*jRPa6I&xvmO+~qF|!pX=b4a?K#5 zgD4Cn2F_*FfbQHuQ`QJ#pY^QdU>@7FWA|BX+=xERXQ$e^OlhJ9nZxG}i4Km>BbR~8 zlFoRiUWoX);Z}{uWh|KOBY5nPsUfxmQC=5*n8kBmY}X=+D~IC40K)ruonTK^O4;+i7K@AKP;06;Q%bC zvGIK>0hv+G9i$`A9Hq6i^N1_muyQ+8oRRL?>ghl!C+=5O7x%<&8(p*vBC+40sDAEs zSZm#Y+1a4S*#_?GxSLz~5=hI9kD7y}Ib=<2QOx~s(Y<;{e`#S9f z`n!6I-b%o;rn46mhf)(+8Xv~fNKbOHV`nQuk`xU~w<9(|HzC?!* z%=5Jy;NuHYvm9MOnra&6a&OjXmNAAy;!y*ih77gx61!2xjF_R0lmT zO!sCmsd){=wl_hHv-?>N3ebQExEY)e_dHkH2%N1GIjB9ayf}eG2BHNFPXBf}oZIsa zPm0`PGhL^#)cQZ1*v~H;2m>8RKTB+QLxX~(Yi$=No{~Vr*8AFT%seI}`K!&8B(q#* z;Wp|aFoPdr27B$CzjT$_uPdeh^q;x8tHlEyz=Ua|kC0f&7?XKV>)UV!Fm{?G;@fg- zP>DMzosDn7W0mzY2Yt#B?h^R1Jj}>cv2j6BHCACnx9T=I$opN>2f^RbavjaLy8{4zhfze1?3sED`}6(CHPZ 
z0v-&@@TF_P5{+b3C3+6DT?3erNZ;+pY3P3=Iy-fL$#A$g=~n%Pvo{L*tiYg^=gfK} zLrh623f;?Ux-@XB-QeB|G-Kmg^!m!*41zJRubjcb!C#=&F|y#)(9~Y5VXNUOM-qqq zRUIZ?1O;T2@ypeM_6o_UW;Bi)xL7#S6X-qgPUS0SGia2)Qr_6wngcZ6{TvERI1sd+ z!KLDoJ`rDeoDXh+N%x*Sm7I4#wZ*kWdm07P#lNJ4iBRDNdwP1n#e<9`!q=$C?I`qXzkd4l8H(5*H~I4=P$o zYC=NY4|JgnkBpd%!i>Mc9tWCOX&y&AAc;ljzpUDxg1Qdw3ax7EAEyE_5)~(oi8Naa zbn`eyjUmt)0wd@Tx0hBxGJm*b{mBF={!Zc~XQQljC1x&U$}0X}?+0m6Tlr_DT|{22WF;TQv=?HqLT}85Qb0@B z`*KnPlSV#4#P_jw*@qD;O?SgkI`jOM_#t>yM>`x^_ABf2lK(wx|IH7|za?Nq5IlBa zWV8kz@C`h}B(S~QTaXT+#;1))-Re0qDsT%r~-q ztPBi@q0EAUx-H+~gswL%ezZnOlEjd+|2s;>Gjvl3;lF>cLy2=wwHY59`>OM2RMd;z z8k2q;S*UlB$ucq&XuVN*jUS~CSPF!^rwa=H^FjbbWZCDJTUlii1yogHUwGFPyXNA3yLLM3@&4Si2Ub$1mZ+U|6TR};SB{4 a-{T6IusOl8TktB47qU_clBMDwzy2?=dlNqZ diff --git a/docs/test-deployment/deployment.puml b/docs/test-deployment/deployment.puml index 89aa3bd09..a083e744c 100644 --- a/docs/test-deployment/deployment.puml +++ b/docs/test-deployment/deployment.puml @@ -1,19 +1,16 @@ @startuml package "Docker" { [Acceptance Test Runner] - [Ext-pgw] - [Ocsgw] + [ocsgw] [ESP] [Prime] database DB } -[Acceptance Test Runner] -> [Ext-pgw] : http -[Acceptance Test Runner] -> [Prime] : http -[Ext-pgw] -> [Ocsgw] : diameter -[Ocsgw] -> [ESP] : gRPC -[Ocsgw] -> [Prime] : 8082 -[ESP] -> [Prime] : 8080 +[Acceptance Test Runner] --> [ocsgw] : diameter +[Acceptance Test Runner] -> [Prime] : http @ 8080 +[ocsgw] -> [ESP] : gRPC +[ESP] -> [Prime] : gRPC @ 8082 [Prime] -> DB @enduml diff --git a/embedded-graph-store/src/main/kotlin/org/ostelco/prime/storage/embeddedgraph/GraphStore.kt b/embedded-graph-store/src/main/kotlin/org/ostelco/prime/storage/embeddedgraph/GraphStore.kt deleted file mode 100644 index 35822dc88..000000000 --- a/embedded-graph-store/src/main/kotlin/org/ostelco/prime/storage/embeddedgraph/GraphStore.kt +++ /dev/null @@ -1,183 +0,0 @@ -package org.ostelco.prime.storage.embeddedgraph - -import org.ostelco.prime.model.ApplicationToken -import org.ostelco.prime.model.Entity -import 
org.ostelco.prime.model.Offer -import org.ostelco.prime.model.Product -import org.ostelco.prime.model.ProductClass -import org.ostelco.prime.model.PurchaseRecord -import org.ostelco.prime.model.Segment -import org.ostelco.prime.model.Subscriber -import org.ostelco.prime.model.Subscription -import org.ostelco.prime.storage.AdminDataStore -import org.ostelco.prime.storage.legacy.Storage -import org.ostelco.prime.storage.legacy.StorageException -import java.util.* -import java.util.stream.Collectors - -class GraphStore : Storage by GraphStoreSingleton, AdminDataStore by GraphStoreSingleton - -object GraphStoreSingleton : Storage, AdminDataStore { - - private val subscriberEntity = EntityType("Subscriber", Subscriber::class.java) - private val subscriberStore = EntityStore(subscriberEntity) - - private val productEntity = EntityType("Product", Product::class.java) - private val productStore = EntityStore(productEntity) - - private val subscriptionEntity = EntityType("Subscription", Subscription::class.java) - private val subscriptionStore = EntityStore(subscriptionEntity) - - private val notificationTokenEntity = EntityType("NotificationToken", ApplicationToken::class.java) - private val notificationTokenStore = EntityStore(notificationTokenEntity) - - private val subscriptionRelation = RelationType( - name = "HAS_SUBSCRIPTION", - from = subscriberEntity, - to = subscriptionEntity, - dataClass = Void::class.java) - private val subscriptionRelationStore = RelationStore(subscriptionRelation) - - private val purchaseRecordRelation = RelationType( - name = "PURCHASED", - from = subscriberEntity, - to = productEntity, - dataClass = PurchaseRecord::class.java) - private val purchaseRecordStore = RelationStore(purchaseRecordRelation) - - override val balances: Map - get() = subscriptionStore.getAll().mapValues { it.value.balance } - - override fun getSubscriber(id: String): Subscriber? 
= subscriberStore.get(id) - - override fun addSubscriber(subscriber: Subscriber): Boolean = subscriberStore.create(subscriber.id, subscriber) - - override fun updateSubscriber(subscriber: Subscriber): Boolean = subscriberStore.update(subscriber.id, subscriber) - - override fun removeSubscriber(id: String) = subscriberStore.delete(id) - - override fun addSubscription(id: String, msisdn: String): Boolean { - val from = subscriberStore.get(id) ?: return false - subscriptionStore.create(msisdn, Subscription(msisdn, 0L)) - val to = subscriptionStore.get(msisdn) ?: return false - return subscriptionRelationStore.create(from, null, to) - } - - override fun getProducts(subscriberId: String): Map { - val result = GraphServer.graphDb.execute( - """ - MATCH (:${subscriberEntity.name} {id: '$subscriberId'}) - <-[:${segmentToSubscriberRelation.name}]-(:${segmentEntity.name}) - <-[:${offerToSegmentRelation.name}]-(:${offerEntity.name}) - -[:${offerToProductRelation.name}]->(product:${productEntity.name}) - RETURN properties(product) AS product - """.trimIndent()) - - return result.stream() - .map { ObjectHandler.getObject(it["product"] as Map, Product::class.java) } - .collect(Collectors.toMap({ it?.sku }, { it })) - } - - override fun getProduct(subscriberId: String?, sku: String): Product? = productStore.get(sku) - - override fun getBalance(id: String): Long? { - return subscriberStore.getRelated(id, subscriptionRelation, subscriptionEntity) - .first() - .balance - } - - override fun setBalance(msisdn: String, noOfBytes: Long): Boolean = - subscriptionStore.update(msisdn, Subscription(msisdn, balance = noOfBytes)) - - override fun getMsisdn(subscriptionId: String): String? 
{ - return subscriberStore.getRelated(subscriptionId, subscriptionRelation, subscriptionEntity) - .first() - .msisdn - } - - override fun getPurchaseRecords(id: String): Collection { - return subscriberStore.getRelations(id, purchaseRecordRelation) - } - - override fun addPurchaseRecord(id: String, purchase: PurchaseRecord): String? { - val subscriber = subscriberStore.get(id) ?: throw StorageException("Subscriber not found") - val product = productStore.get(purchase.product.sku) ?: throw StorageException("Product not found") - purchase.id = UUID.randomUUID().toString() - purchaseRecordStore.create(subscriber, purchase, product) - return purchase.id - } - - override fun getNotificationTokens(msisdn: String): Collection = notificationTokenStore.getAll().values - - override fun addNotificationToken(msisdn: String, token: ApplicationToken): Boolean = notificationTokenStore.create("$msisdn.${token.applicationID}", token) - - override fun getNotificationToken(msisdn: String, applicationID: String): ApplicationToken? 
= notificationTokenStore.get("$msisdn.$applicationID") - - override fun removeNotificationToken(msisdn: String, applicationID: String): Boolean = notificationTokenStore.delete("$msisdn.$applicationID") - // - // Admin Store - // - - private val offerEntity = EntityType("Offer", Entity::class.java) - private val offerStore = EntityStore(offerEntity) - - private val segmentEntity = EntityType("Segment", Entity::class.java) - private val segmentStore = EntityStore(segmentEntity) - - private val offerToSegmentRelation = RelationType("offerHasSegment", offerEntity, segmentEntity, Void::class.java) - private val offerToSegmentStore = RelationStore(offerToSegmentRelation) - - private val offerToProductRelation = RelationType("offerHasProduct", offerEntity, productEntity, Void::class.java) - private val offerToProductStore = RelationStore(offerToProductRelation) - - private val segmentToSubscriberRelation = RelationType("segmentToSubscriber", segmentEntity, subscriberEntity, Void::class.java) - private val segmentToSubscriberStore = RelationStore(segmentToSubscriberRelation) - - private val productClassEntity = EntityType("ProductClass", ProductClass::class.java) - private val productClassStore = EntityStore(productClassEntity) - - override fun createProductClass(productClass: ProductClass): Boolean = productClassStore.create(productClass.id, productClass) - - override fun createProduct(product: Product): Boolean = productStore.create(product.sku, product) - - override fun createSegment(segment: Segment) { - segmentStore.create(segment.id, segment) - updateSegment(segment) - } - - override fun createOffer(offer: Offer) { - offerStore.create(offer.id, offer) - offerToSegmentStore.create(offer.id, offer.segments) - offerToProductStore.create(offer.id, offer.products) - } - - override fun updateSegment(segment: Segment) { - segmentToSubscriberStore.create(segment.id, segment.subscribers) - } - - override fun getPaymentId(id: String): String? 
{ - TODO("not implemented") - } - - override fun deletePaymentId(id: String): Boolean { - TODO("not implemented") - } - - override fun createPaymentId(id: String, paymentId: String): Boolean { - TODO("not implemented") - } - - override fun getCustomerId(id: String): String? { - TODO("not implemented") //To change body of created functions use File | Settings | File Templates. - } - - // override fun getOffers(): Collection = offerStore.getAll().values.map { Offer().apply { id = it.id } } - - // override fun getSegments(): Collection = segmentStore.getAll().values.map { Segment().apply { id = it.id } } - - // override fun getOffer(id: String): Offer? = offerStore.get(id)?.let { Offer().apply { this.id = it.id } } - - // override fun getSegment(id: String): Segment? = segmentStore.get(id)?.let { Segment().apply { this.id = it.id } } - - // override fun getProductClass(id: String): ProductClass? = productClassStore.get(id) -} \ No newline at end of file diff --git a/prime/build.gradle b/prime/build.gradle index c295eb5a3..a283a96f8 100644 --- a/prime/build.gradle +++ b/prime/build.gradle @@ -18,7 +18,7 @@ sourceSets { } } -version = "1.13.0" +version = "1.14.0" repositories { maven { @@ -43,7 +43,7 @@ dependencies { implementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" implementation "io.dropwizard:dropwizard-http2:$dropwizardVersion" - implementation "io.dropwizard:dropwizard-json-logging:$dropwizardVersion" + runtimeOnly "io.dropwizard:dropwizard-json-logging:$dropwizardVersion" implementation 'com.google.guava:guava:25.1-jre' implementation 'org.dhatim:dropwizard-prometheus:2.2.0' diff --git a/prime/config/config.yaml b/prime/config/config.yaml index 1f4bf8d61..9dec2810c 100644 --- a/prime/config/config.yaml +++ b/prime/config/config.yaml @@ -34,6 +34,13 @@ server: port: 8080 maxConcurrentStreams: 1024 initialStreamRecvWindow: 65535 + requestLog: + appenders: + - type: console + layout: + type: json + customFieldNames: + level: 
severity logging: level: INFO @@ -43,4 +50,5 @@ logging: - type: console layout: type: json - + customFieldNames: + level: severity \ No newline at end of file diff --git a/prime/infra/dev/prime-client-api.yaml b/prime/infra/dev/prime-client-api.yaml index b6e682c36..868bb6ed6 100644 --- a/prime/infra/dev/prime-client-api.yaml +++ b/prime/infra/dev/prime-client-api.yaml @@ -4,6 +4,9 @@ info: description: "The client API for Panacea." version: "1.0.0" host: "api.dev.ostelco.org" +x-google-endpoints: + - name: "api.dev.ostelco.org" + allowCors: true schemes: - "https" paths: @@ -527,4 +530,4 @@ securityDefinitions: type: "oauth2" x-google-issuer: "https://ostelco.eu.auth0.com/" x-google-jwks_uri: "https://ostelco.eu.auth0.com/.well-known/jwks.json" - x-google-audiences: "http://google_api" + x-google-audiences: "http://google_api" \ No newline at end of file diff --git a/prime/script/deploy-dev-direct.sh b/prime/script/deploy-dev-direct.sh index 408df2c44..f25997bbd 100755 --- a/prime/script/deploy-dev-direct.sh +++ b/prime/script/deploy-dev-direct.sh @@ -7,7 +7,7 @@ if [ ! -f prime/script/deploy.sh ]; then exit 1 fi -# TODO vihang: check if the kubectl config points to dev cluster +kubectl config use-context $(kubectl config get-contexts --output name | grep dev-cluster) PROJECT_ID="$(gcloud config get-value project -q)" PRIME_VERSION="$(gradle prime:properties -q | grep "version:" | awk '{print $2}' | tr -d '[:space:]')" diff --git a/prime/script/deploy-direct.sh b/prime/script/deploy-direct.sh index e4416a1ab..6568413dd 100755 --- a/prime/script/deploy-direct.sh +++ b/prime/script/deploy-direct.sh @@ -14,6 +14,8 @@ if [ ! 
-f ${CHECK_REPO} ]; then exit 1 fi +kubectl config use-context $(kubectl config get-contexts --output name | grep private-cluster) + BRANCH_NAME=$(git branch | grep \* | cut -d ' ' -f2) echo BRANCH_NAME=${BRANCH_NAME} ${CHECK_REPO} ${BRANCH_NAME} From a357dea55050b2c10e5e3ec872f0c2b0c90e902f Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Thu, 6 Sep 2018 15:25:56 +0200 Subject: [PATCH 30/78] Use seperate file for logback in dev for ocsgw --- ocsgw/build.gradle | 6 ++++++ ocsgw/config/logback.dev.xml | 35 +++++++++++++++++++++++++++++++++++ scripts/deploy-ocsgw.sh | 9 +++++---- 3 files changed, 46 insertions(+), 4 deletions(-) create mode 100644 ocsgw/config/logback.dev.xml diff --git a/ocsgw/build.gradle b/ocsgw/build.gradle index 3a307fed5..401cf1c8c 100644 --- a/ocsgw/build.gradle +++ b/ocsgw/build.gradle @@ -76,6 +76,12 @@ task packDev(type: Zip, dependsOn: 'shadowJar') { from ('script/') { into(project.name + '/script') } + from ('config/logback.dev.xml') { + into (project.name + '/config/') + rename { String fileName -> + fileName.replace('dev.', '') + } + } from ('config/dictionary.xml') { into (project.name + '/config/') } diff --git a/ocsgw/config/logback.dev.xml b/ocsgw/config/logback.dev.xml new file mode 100644 index 000000000..4c7a49c7a --- /dev/null +++ b/ocsgw/config/logback.dev.xml @@ -0,0 +1,35 @@ + + + + + + %d{dd MMM yyyy HH:mm:ss,SSS} %-5p %c{1} - %m%n + + + + + + INFO + + ocsgw + global + INFO + + + + + 1000 + + + + + + + + + + + + + + \ No newline at end of file diff --git a/scripts/deploy-ocsgw.sh b/scripts/deploy-ocsgw.sh index 30af284ab..ad84ea7c5 100755 --- a/scripts/deploy-ocsgw.sh +++ b/scripts/deploy-ocsgw.sh @@ -7,10 +7,6 @@ # ctr-c # -echo "Starting to deploy OCSGW to test installation" -echo "The last thing this script will do is to look at logs from the ocsgw" -echo "It will continue to do so until terminated by ^C" - variant=dev host_ip=192.168.0.124 if [ "$1" = prod ] ; then @@ -18,6 +14,11 @@ if [ "$1" = prod ] ; then 
variant=prod fi +echo "Starting to deploy OCSGW to $variant" +echo "The last thing this script will do is to look at logs from the ocsgw" +echo "It will continue to do so until terminated by ^C" + + scp -oProxyJump=loltel@10.6.101.1 build/deploy/ostelco-core-${variant}.zip ubuntu@${host_ip}:. ssh -A -Jloltel@10.6.101.1 ubuntu@${host_ip} < Date: Thu, 6 Sep 2018 21:09:38 +0200 Subject: [PATCH 31/78] Synced jersey and okhttp ATs. --- .../kotlin/org/ostelco/at/jersey/Tests.kt | 38 +++- .../kotlin/org/ostelco/at/okhttp/Tests.kt | 173 +++++++++++++++++- prime/infra/dev/prime-client-api.yaml | 8 +- prime/infra/prod/prime-client-api.yaml | 13 +- 4 files changed, 222 insertions(+), 10 deletions(-) diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt index 9f60bb793..a38428e07 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt @@ -12,6 +12,7 @@ import org.ostelco.prime.client.model.ActivePseudonyms import org.ostelco.prime.client.model.ApplicationToken import org.ostelco.prime.client.model.Consent import org.ostelco.prime.client.model.PaymentSource +import org.ostelco.prime.client.model.PaymentSourceList import org.ostelco.prime.client.model.Person import org.ostelco.prime.client.model.Price import org.ostelco.prime.client.model.Product @@ -225,6 +226,38 @@ class GetProductsTest { } } +class SourceTest { + + @Test + fun `jersey test - POST source create`() { + + StripePayment.deleteAllCustomers() + Firebase.deleteAllPaymentCustomers() + + val email = "purchase-${randomInt()}@test.com" + createProfile(name = "Test Payment Source", email = email) + + val sourceId = StripePayment.createPaymentTokenId() + + // Ties source with user profile both local and with Stripe + post { + path = "/paymentSources" + subscriberId = email + } + + Thread.sleep(200) + + val sources: PaymentSourceList = get { + 
path = "/paymentSources" + subscriberId = email + } + assert(sources.isNotEmpty()) { "Expected at least one payment source for profile $email" } + + val cardId = StripePayment.getCardIdForTokenId(sourceId) + assertNotNull(sources.first { it.id == cardId }, "Expected card $cardId in list of payment sources for profile $email") + } +} + class PurchaseTest { @Test @@ -248,7 +281,7 @@ class PurchaseTest { post { path = "/products/$productSku/purchase" subscriberId = email - queryParams = mapOf( "sourceId" to sourceId) + queryParams = mapOf("sourceId" to sourceId) } Thread.sleep(100) // wait for 100 ms for balance to be updated in db @@ -276,13 +309,14 @@ class PurchaseTest { fun `jersey test - POST products purchase using default source`() { StripePayment.deleteAllCustomers() + Firebase.deleteAllPaymentCustomers() val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Purchase User with Default Payment Source", email = email) val sourceId = StripePayment.createPaymentTokenId() - val paymentSource:PaymentSource = post { + val paymentSource: PaymentSource = post { path = "/paymentSources" subscriberId = email queryParams = mapOf("sourceId" to sourceId) diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt index 39c3d12ba..f6de6ba50 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt @@ -9,13 +9,21 @@ import org.ostelco.at.common.expectedProducts import org.ostelco.at.common.logger import org.ostelco.at.common.randomInt import org.ostelco.at.okhttp.ClientFactory.clientForSubject +import org.ostelco.prime.client.model.ApplicationToken import org.ostelco.prime.client.model.Consent +import org.ostelco.prime.client.model.PaymentSource +import org.ostelco.prime.client.model.Person +import org.ostelco.prime.client.model.PersonList import org.ostelco.prime.client.model.Price 
import org.ostelco.prime.client.model.Product import org.ostelco.prime.client.model.Profile +import org.ostelco.prime.client.model.SubscriptionStatus import java.time.Instant +import java.util.* import kotlin.test.assertEquals +import kotlin.test.assertFails import kotlin.test.assertNotNull +import kotlin.test.assertNull class ProfileTest { @@ -35,7 +43,7 @@ class ProfileTest { .postCode("") .referralId("") - client.createProfile(createProfile) + client.createProfile(createProfile, null) val profile: Profile = client.profile @@ -73,6 +81,32 @@ class ProfileTest { assertEquals("", clearedProfile.city, "Incorrect 'city' in response after clearing profile") assertEquals("", clearedProfile.country, "Incorrect 'country' in response after clearing profile") } + + @Test + fun `okhttp test - GET application token`() { + + val email = "token-${randomInt()}@test.com" + createProfile("Test Token User", email) + + createSubscription(email) + + val token = UUID.randomUUID().toString() + val applicationId = "testApplicationId" + val tokenType = "FCM" + + val testToken = ApplicationToken() + .token(token) + .applicationID(applicationId) + .tokenType(tokenType) + + val client = clientForSubject(subject = email) + + val reply = client.storeApplicationToken(testToken) + + assertEquals(token, reply.token, "Incorrect token in reply after posting new token") + assertEquals(applicationId, reply.applicationID, "Incorrect applicationId in reply after posting new token") + assertEquals(tokenType, reply.tokenType, "Incorrect tokenType in reply after posting new token") + } } class GetSubscriptions { @@ -170,6 +204,7 @@ class SourceTest { fun `okhttp test - POST source create`() { StripePayment.deleteAllCustomers() + Firebase.deleteAllPaymentCustomers() val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Payment Source", email = email) @@ -184,7 +219,7 @@ class SourceTest { Thread.sleep(200) val sources = client.listSources() - assert(sources.size > 0) { "Expected at 
least one payment source for profile $email" } + assert(sources.isNotEmpty()) { "Expected at least one payment source for profile $email" } val cardId = StripePayment.getCardIdForTokenId(sourceId) assertNotNull(sources.first { it.id == cardId }, "Expected card $cardId in list of payment sources for profile $email") @@ -224,6 +259,43 @@ class PurchaseTest { assertEquals(expectedProducts().first(), purchaseRecords.last().product, "Incorrect 'Product' in purchase record") } + @Test + fun `okhttp test - POST products purchase using default source`() { + + StripePayment.deleteAllCustomers() + Firebase.deleteAllPaymentCustomers() + + val email = "purchase-${randomInt()}@test.com" + createProfile(name = "Test Purchase User with Default Payment Source", email = email) + + val sourceId = StripePayment.createPaymentTokenId() + + val client = clientForSubject(subject = email) + + val paymentSource: PaymentSource = client.createSource(sourceId) + + assertNotNull(paymentSource.id, message = "Failed to create payment source") + + val balanceBefore = client.subscriptionStatus.remaining + + val productSku = "1GB_249NOK" + + client.purchaseProduct(productSku, null, null) + + Thread.sleep(200) // wait for 200 ms for balance to be updated in db + + val balanceAfter = client.subscriptionStatus.remaining + + assertEquals(1_000_000_000, balanceAfter - balanceBefore, "Balance did not increased by 1GB after Purchase") + + val purchaseRecords = client.purchaseHistory + + purchaseRecords.sortBy { it.timestamp } + + assert(Instant.now().toEpochMilli() - purchaseRecords.last().timestamp < 10_000) { "Missing Purchase Record" } + assertEquals(expectedProducts().first(), purchaseRecords.last().product, "Incorrect 'Product' in purchase record") + } + @Test fun `okhttp test - POST products purchase without payment`() { @@ -269,7 +341,7 @@ class ConsentTest { assertEquals(consentId, defaultConsent[0].consentId, "Incorrect 'consent id' in fetched consent") // TODO vihang: Update consent operation is 
missing response entity - // val acceptedConsent: Consent = +// val acceptedConsent: Consent = client.updateConsent(consentId, true) // assertEquals(consentId, acceptedConsent.consentId, "Incorrect 'consent id' in response after accepting consent") @@ -281,4 +353,99 @@ class ConsentTest { // assertEquals(consentId, rejectedConsent.consentId, "Incorrect 'consent id' in response after rejecting consent") // assertFalse(rejectedConsent.isAccepted ?: true, "Accepted consent not reflected in response after rejecting consent") } +} + +class ReferralTest { + + @Test + fun `okhttp test - POST profile with invalid referred by`() { + + val email = "referred_by_invalid-${randomInt()}@test.com" + + val client = clientForSubject(subject = email) + + val invalid = "invalid_referrer@test.com" + + val profile = Profile() + .email(email) + .name("Test Referral Second User") + .address("") + .city("") + .country("") + .postCode("") + .referralId("") + + val failedToCreate = assertFails { + client.createProfile(profile, invalid) + } + + assertEquals(""" +{"description":"Incomplete profile description. Subscriber - $invalid not found."} expected:<201> but was:<403> + """.trimIndent(), failedToCreate.message) + + val failedToGet = assertFails { + client.profile + } + + assertEquals(""" +{"description":"Incomplete profile description. 
Subscriber - $email not found."} expected:<200> but was:<404> + """.trimIndent(), failedToGet.message) + } + + @Test + fun `okhttp test - POST profile`() { + + val firstEmail = "referral_first-${randomInt()}@test.com" + createProfile(name = "Test Referral First User", email = firstEmail) + + val secondEmail = "referral_second-${randomInt()}@test.com" + + val profile = Profile() + .email(secondEmail) + .name("Test Referral Second User") + .address("") + .city("") + .country("") + .postCode("") + .referralId("") + + val firstEmailClient = clientForSubject(subject = firstEmail) + val secondEmailClient = clientForSubject(subject = secondEmail) + + secondEmailClient.createProfile(profile, firstEmail) + + // for first + val referralsForFirst: PersonList = firstEmailClient.referred + + assertEquals(listOf("Test Referral Second User"), referralsForFirst.map { it.name }) + + val referredByForFirst: Person = firstEmailClient.referredBy + assertNull(referredByForFirst.name) + + // No need to test SubscriptionStatus for first, since it is already tested in GetSubscriptionStatusTest. 
+ + // for referred_by_foo + val referralsForSecond: List = secondEmailClient.referred + + assertEquals(emptyList(), referralsForSecond.map { it.name }) + + val referredByForSecond: Person = secondEmailClient.referredBy + + assertEquals("Test Referral First User", referredByForSecond.name) + + val secondSubscriptionStatus: SubscriptionStatus = secondEmailClient.subscriptionStatus + + assertEquals(1_000_000_000, secondSubscriptionStatus.remaining) + + val freeProductForReferred = Product() + .sku("1GB_FREE_ON_REFERRED") + .price(Price().apply { + this.amount = 0 + this.currency = "NOK" + }) + .properties(mapOf("noOfBytes" to "1_000_000_000")) + .presentation(emptyMap()) + + assertEquals(listOf(freeProductForReferred), secondSubscriptionStatus.purchaseRecords.map { it.product }) + } } \ No newline at end of file diff --git a/prime/infra/dev/prime-client-api.yaml b/prime/infra/dev/prime-client-api.yaml index 07b253692..d8eae28df 100644 --- a/prime/infra/dev/prime-client-api.yaml +++ b/prime/infra/dev/prime-client-api.yaml @@ -33,11 +33,15 @@ paths: - application/json operationId: "createProfile" parameters: - - in: body - name: profile + - name: profile + in: body description: The profile to create. schema: $ref: '#/definitions/Profile' + - name: referred_by + in: query + description: "Referral ID of user who has invited this user" + type: string responses: 201: description: "Successfully created the profile." diff --git a/prime/infra/prod/prime-client-api.yaml b/prime/infra/prod/prime-client-api.yaml index 8993f7d64..1b4e1b0dd 100644 --- a/prime/infra/prod/prime-client-api.yaml +++ b/prime/infra/prod/prime-client-api.yaml @@ -4,6 +4,9 @@ info: description: "The client API for Panacea." 
version: "1.0.0" host: "api.ostelco.org" +x-google-endpoints: + - name: "api.ostelco.org" + allowCors: true schemes: - "https" paths: @@ -30,11 +33,15 @@ paths: - application/json operationId: "createProfile" parameters: - - in: body - name: profile + - name: profile + in: body description: The profile to create. schema: $ref: '#/definitions/Profile' + - name: referred_by + in: query + description: "Referral ID of user who has invited this user" + type: string responses: 201: description: "Successfully created the profile." @@ -527,4 +534,4 @@ securityDefinitions: type: "oauth2" x-google-issuer: "https://ostelco.eu.auth0.com/" x-google-jwks_uri: "https://ostelco.eu.auth0.com/.well-known/jwks.json" - x-google-audiences: "http://google_api" + x-google-audiences: "http://google_api" \ No newline at end of file From 4bc589798ff013418129db0d0e1470b02885b9e4 Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Fri, 7 Sep 2018 10:32:37 +0200 Subject: [PATCH 32/78] Remove duplicate --- ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt | 1 - 1 file changed, 1 deletion(-) diff --git a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt index 0d5be6883..3ee85c00c 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/disruptor/OcsEvent.kt @@ -59,7 +59,6 @@ class OcsEvent { msisdnToppedUp = null bundleBytes = 0 reservedBucketBytes = 0 - bundleBytes = 0 ocsgwStreamId = null request = null topUpBytes = 0; From 9082e6a873aaa9d25e760eb6b95672159203df4e Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Fri, 7 Sep 2018 10:38:10 +0200 Subject: [PATCH 33/78] Added ToDo --- .../kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt | 1 + 1 file changed, 1 insertion(+) diff --git a/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt b/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt index 
9e7bfd867..2a5fa7991 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt @@ -36,6 +36,7 @@ class DataConsumptionInfo() : EventHandler { primeMetric = MEGABYTES_CONSUMED, value = (event.request?.getMscc(0)?.used?.totalOctets ?: 0L) / 1_000_000) + //ToDo: Send to analytics and build pipeline event.request?.let { request -> if(request.type == CreditControlRequestType.INITIAL_REQUEST) { logger.info("MSISDN : {} connected apn {} sgsn_mcc_mnc {}", From 20e3f0bbdb521cc499c6834fcdaca95a0afd6a1f Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Fri, 7 Sep 2018 11:02:35 +0200 Subject: [PATCH 34/78] Update docs --- prime/infra/README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/prime/infra/README.md b/prime/infra/README.md index 11d55723c..dec50a7d2 100644 --- a/prime/infra/README.md +++ b/prime/infra/README.md @@ -223,6 +223,11 @@ kubectl create secret generic metrics-ostelco-ssl \ --from-file=certs/dev.ostelco.org/nginx.crt \ --from-file=certs/dev.ostelco.org/nginx.key ``` +### Cloud Pub/Sub + +```bash +gcloud pubsub topics create purchase-info +``` ### Endpoints From d903fe8ddc89e071945ed5f6941c403965232cab Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Fri, 7 Sep 2018 12:47:06 +0200 Subject: [PATCH 35/78] Fix mscc empty list check --- .../org/ostelco/prime/analytics/DataConsumptionInfo.kt | 4 ++-- .../main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt | 4 ++-- ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt | 2 +- .../org/ostelco/prime/disruptor/PrimeEventProducerTest.kt | 8 ++++---- prime/config/config.yaml | 1 + 5 files changed, 10 insertions(+), 9 deletions(-) diff --git a/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt b/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt index 2a5fa7991..085499ce8 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt +++ 
b/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt @@ -30,11 +30,11 @@ class DataConsumptionInfo() : EventHandler { logger.info("Sent DataConsumptionInfo event to analytics") analyticsReporter.reportTrafficInfo( msisdn = event.msisdn!!, - usedBytes = event.request?.getMscc(0)?.used?.totalOctets ?: 0L, + usedBytes = event.request?.msccList?.first()?.used?.totalOctets ?: 0L, bundleBytes = event.bundleBytes) analyticsReporter.reportMetric( primeMetric = MEGABYTES_CONSUMED, - value = (event.request?.getMscc(0)?.used?.totalOctets ?: 0L) / 1_000_000) + value = (event.request?.msccList?.first()?.used?.totalOctets ?: 0L) / 1_000_000) //ToDo: Send to analytics and build pipeline event.request?.let { request -> diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt index f4d283915..1379b16a0 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt @@ -53,8 +53,8 @@ internal class EventHandlerImpl(private val ocsService: OcsService) : EventHandl val logString = """ $msg Msisdn: ${event.msisdn} - Requested bytes: ${event.request?.getMscc(0)?.requested?.totalOctets ?: 0L} - Used bytes: ${event.request?.getMscc(0)?.used?.totalOctets ?: 0L} + Requested bytes: ${event.request?.msccList?.first()?.requested?.totalOctets ?: 0L} + Used bytes: ${event.request?.msccList?.first()?.used?.totalOctets ?: 0L} Bundle bytes: ${event.bundleBytes} Topup bytes: ${event.topUpBytes} Request id: ${event.request?.requestId} diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt index 1e2ccd218..cfd3cd2d7 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt @@ -44,7 +44,7 @@ class OcsState(val loadSubscriberInfo:Boolean = true) : EventHandler { consumeDataBytes(msisdn, 
event.request?.getMscc(0)?.used?.totalOctets ?: 0L) event.reservedBucketBytes = reserveDataBytes( msisdn, - event.request?.getMscc(0)?.requested?.totalOctets ?: 0L) + event.request?.msccList?.first()?.requested?.totalOctets ?: 0L) event.bundleId = msisdnToBundleIdMap[msisdn] event.bundleBytes = bundleBalanceMap[event.bundleId] ?: 0 } diff --git a/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt b/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt index e986c2f8c..f7dde546e 100644 --- a/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt +++ b/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt @@ -98,10 +98,10 @@ class PrimeEventProducerTest { val event = collectedEvent assertEquals(MSISDN, event.msisdn) - assertEquals(REQUESTED_BYTES, event.request?.getMscc(0)?.requested?.totalOctets ?: 0L) - assertEquals(USED_BYTES, event.request?.getMscc(0)?.used?.totalOctets ?: 0L) - assertEquals(RATING_GROUP, event.request?.getMscc(0)?.ratingGroup) - assertEquals(SERVICE_IDENTIFIER, event.request?.getMscc(0)?.serviceIdentifier) + assertEquals(REQUESTED_BYTES, event.request?.msccList?.first()?.requested?.totalOctets ?: 0L) + assertEquals(USED_BYTES, event.request?.msccList?.first()?.used?.totalOctets ?: 0L) + assertEquals(RATING_GROUP, event.request?.msccList?.first()?.ratingGroup) + assertEquals(SERVICE_IDENTIFIER, event.request?.msccList?.first()?.serviceIdentifier) assertEquals(STREAM_ID, event.ocsgwStreamId) assertEquals(CREDIT_CONTROL_REQUEST, event.messageType) } diff --git a/prime/config/config.yaml b/prime/config/config.yaml index 3d56c7e7a..e12a6f1af 100644 --- a/prime/config/config.yaml +++ b/prime/config/config.yaml @@ -47,6 +47,7 @@ logging: level: INFO loggers: org.ostelco: DEBUG + org.dhatim.dropwizard.prometheus.DropwizardMetricsExporter: ERROR appenders: - type: console layout: From 6e44ad8cf417d5396725f555d22ab96f85492f9b Mon Sep 17 00:00:00 2001 From: Martin Cederlof 
Date: Fri, 7 Sep 2018 13:26:09 +0200 Subject: [PATCH 36/78] Updated DIAMETER dev configuration We will use a different realm for the dev environment for easier debugging. --- ocsgw/config/server-jdiameter-config.dev.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocsgw/config/server-jdiameter-config.dev.xml b/ocsgw/config/server-jdiameter-config.dev.xml index af551d1ee..e197b71ac 100644 --- a/ocsgw/config/server-jdiameter-config.dev.xml +++ b/ocsgw/config/server-jdiameter-config.dev.xml @@ -5,7 +5,7 @@ - + From 338574a312c83adeb26b6320474cc2dd48f9d6a7 Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Fri, 7 Sep 2018 13:39:46 +0200 Subject: [PATCH 37/78] Enabled debug for ocsgw in dev --- ocsgw/config/logback.dev.xml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ocsgw/config/logback.dev.xml b/ocsgw/config/logback.dev.xml index 4c7a49c7a..d268423e5 100644 --- a/ocsgw/config/logback.dev.xml +++ b/ocsgw/config/logback.dev.xml @@ -21,13 +21,14 @@ 1000 - + + - + From 02f35b0dd336ba00700a9505f779dbf809429601 Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Fri, 7 Sep 2018 15:17:47 +0200 Subject: [PATCH 38/78] Using firstOrNull instead of first on lists --- .../org/ostelco/prime/analytics/DataConsumptionInfo.kt | 4 ++-- .../main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt | 4 ++-- ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt | 4 ++-- .../org/ostelco/prime/disruptor/PrimeEventProducerTest.kt | 8 ++++---- .../java/org/ostelco/ocsgw/data/grpc/GrpcDataSource.java | 4 ++-- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt b/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt index 085499ce8..268660faa 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/analytics/DataConsumptionInfo.kt @@ -30,11 +30,11 @@ class DataConsumptionInfo() : 
EventHandler { logger.info("Sent DataConsumptionInfo event to analytics") analyticsReporter.reportTrafficInfo( msisdn = event.msisdn!!, - usedBytes = event.request?.msccList?.first()?.used?.totalOctets ?: 0L, + usedBytes = event.request?.msccList?.firstOrNull()?.used?.totalOctets ?: 0L, bundleBytes = event.bundleBytes) analyticsReporter.reportMetric( primeMetric = MEGABYTES_CONSUMED, - value = (event.request?.msccList?.first()?.used?.totalOctets ?: 0L) / 1_000_000) + value = (event.request?.msccList?.firstOrNull()?.used?.totalOctets ?: 0L) / 1_000_000) //ToDo: Send to analytics and build pipeline event.request?.let { request -> diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt index 1379b16a0..42f34ec18 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt @@ -53,8 +53,8 @@ internal class EventHandlerImpl(private val ocsService: OcsService) : EventHandl val logString = """ $msg Msisdn: ${event.msisdn} - Requested bytes: ${event.request?.msccList?.first()?.requested?.totalOctets ?: 0L} - Used bytes: ${event.request?.msccList?.first()?.used?.totalOctets ?: 0L} + Requested bytes: ${event.request?.msccList?.firstOrNull()?.requested?.totalOctets ?: 0L} + Used bytes: ${event.request?.msccList?.firstOrNull()?.used?.totalOctets ?: 0L} Bundle bytes: ${event.bundleBytes} Topup bytes: ${event.topUpBytes} Request id: ${event.request?.requestId} diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt index cfd3cd2d7..6e03b6820 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/ocs/OcsState.kt @@ -41,10 +41,10 @@ class OcsState(val loadSubscriberInfo:Boolean = true) : EventHandler { logger.error("Received null as msisdn") return } - consumeDataBytes(msisdn, 
event.request?.getMscc(0)?.used?.totalOctets ?: 0L) + consumeDataBytes(msisdn, event.request?.msccList?.firstOrNull()?.used?.totalOctets ?: 0L) event.reservedBucketBytes = reserveDataBytes( msisdn, - event.request?.msccList?.first()?.requested?.totalOctets ?: 0L) + event.request?.msccList?.firstOrNull()?.requested?.totalOctets ?: 0L) event.bundleId = msisdnToBundleIdMap[msisdn] event.bundleBytes = bundleBalanceMap[event.bundleId] ?: 0 } diff --git a/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt b/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt index f7dde546e..63f04a32e 100644 --- a/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt +++ b/ocs/src/test/kotlin/org/ostelco/prime/disruptor/PrimeEventProducerTest.kt @@ -98,10 +98,10 @@ class PrimeEventProducerTest { val event = collectedEvent assertEquals(MSISDN, event.msisdn) - assertEquals(REQUESTED_BYTES, event.request?.msccList?.first()?.requested?.totalOctets ?: 0L) - assertEquals(USED_BYTES, event.request?.msccList?.first()?.used?.totalOctets ?: 0L) - assertEquals(RATING_GROUP, event.request?.msccList?.first()?.ratingGroup) - assertEquals(SERVICE_IDENTIFIER, event.request?.msccList?.first()?.serviceIdentifier) + assertEquals(REQUESTED_BYTES, event.request?.msccList?.firstOrNull()?.requested?.totalOctets ?: 0L) + assertEquals(USED_BYTES, event.request?.msccList?.firstOrNull()?.used?.totalOctets ?: 0L) + assertEquals(RATING_GROUP, event.request?.msccList?.firstOrNull()?.ratingGroup) + assertEquals(SERVICE_IDENTIFIER, event.request?.msccList?.firstOrNull()?.serviceIdentifier) assertEquals(STREAM_ID, event.ocsgwStreamId) assertEquals(CREDIT_CONTROL_REQUEST, event.messageType) } diff --git a/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/GrpcDataSource.java b/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/GrpcDataSource.java index 361021785..1b615aa1b 100644 --- a/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/GrpcDataSource.java +++ 
b/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/GrpcDataSource.java @@ -229,7 +229,7 @@ public void onNext(CreditControlAnswerInfo answer) { private void handleGrpcCcrAnswer(CreditControlAnswerInfo answer) { try { - LOG.info("[<<] Received data bucket for {}", answer.getMsisdn()); + LOG.info("[<<] CreditControlAnswer for {}", answer.getMsisdn()); final CreditControlContext ccrContext = ccrMap.remove(answer.getRequestId()); if (ccrContext != null) { final ServerCCASession session = OcsServer.getInstance().getStack().getSession(ccrContext.getSessionId(), ServerCCASession.class); @@ -329,7 +329,7 @@ private void updateBlockedList(CreditControlAnswerInfo answer, CreditControlRequ public void handleRequest(final CreditControlContext context) { ccrMap.put(context.getSessionId(), context); addToSessionMap(context); - LOG.info("[>>] Requesting bytes for {}", context.getCreditControlRequest().getMsisdn()); + LOG.info("[>>] creditControlRequest for {}", context.getCreditControlRequest().getMsisdn()); if (creditControlRequest != null) { try { From 54ae9db68df85049261782d5b2d71bbc22cfd347 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Fri, 7 Sep 2018 15:42:52 +0200 Subject: [PATCH 39/78] Add the DDL for raw_purchases table --- prime/infra/raw_purchases_ddl.json | 86 ++++++++++++++++++++++++++++++ 1 file changed, 86 insertions(+) create mode 100644 prime/infra/raw_purchases_ddl.json diff --git a/prime/infra/raw_purchases_ddl.json b/prime/infra/raw_purchases_ddl.json new file mode 100644 index 000000000..7dc62ed01 --- /dev/null +++ b/prime/infra/raw_purchases_ddl.json @@ -0,0 +1,86 @@ +// Dumped using `bq show --schema --format=prettyjson pantel-2decb:purchases.raw_purchases` +[ + { + "mode": "REQUIRED", + "name": "id", + "type": "STRING" + }, + { + "mode": "REQUIRED", + "name": "subscriberId", + "type": "STRING" + }, + { + "mode": "REQUIRED", + "name": "timestamp", + "type": "INTEGER" + }, + { + "mode": "REQUIRED", + "name": "status", + "type": "STRING" + }, + { + 
"fields": [ + { + "mode": "REQUIRED", + "name": "sku", + "type": "STRING" + }, + { + "fields": [ + { + "mode": "REQUIRED", + "name": "amount", + "type": "INTEGER" + }, + { + "mode": "REQUIRED", + "name": "currency", + "type": "STRING" + } + ], + "mode": "REQUIRED", + "name": "price", + "type": "RECORD" + }, + { + "fields": [ + { + "mode": "REQUIRED", + "name": "key", + "type": "STRING" + }, + { + "mode": "REQUIRED", + "name": "value", + "type": "STRING" + } + ], + "mode": "REPEATED", + "name": "properties", + "type": "RECORD" + }, + { + "fields": [ + { + "mode": "REQUIRED", + "name": "key", + "type": "STRING" + }, + { + "mode": "NULLABLE", + "name": "value", + "type": "STRING" + } + ], + "mode": "REPEATED", + "name": "presentation", + "type": "RECORD" + } + ], + "mode": "REQUIRED", + "name": "product", + "type": "RECORD" + } +] From 38a75dcb0995dcaa49a1f22ca4f570d93a1e691a Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Fri, 7 Sep 2018 16:23:14 +0200 Subject: [PATCH 40/78] Create DDL using Standard SQL --- prime/infra/raw_purchases_ddl.json | 86 ---------------------------- prime/infra/raw_purchases_schema.ddl | 23 ++++++++ 2 files changed, 23 insertions(+), 86 deletions(-) delete mode 100644 prime/infra/raw_purchases_ddl.json create mode 100644 prime/infra/raw_purchases_schema.ddl diff --git a/prime/infra/raw_purchases_ddl.json b/prime/infra/raw_purchases_ddl.json deleted file mode 100644 index 7dc62ed01..000000000 --- a/prime/infra/raw_purchases_ddl.json +++ /dev/null @@ -1,86 +0,0 @@ -// Dumped using `bq show --schema --format=prettyjson pantel-2decb:purchases.raw_purchases` -[ - { - "mode": "REQUIRED", - "name": "id", - "type": "STRING" - }, - { - "mode": "REQUIRED", - "name": "subscriberId", - "type": "STRING" - }, - { - "mode": "REQUIRED", - "name": "timestamp", - "type": "INTEGER" - }, - { - "mode": "REQUIRED", - "name": "status", - "type": "STRING" - }, - { - "fields": [ - { - "mode": "REQUIRED", - "name": "sku", - "type": "STRING" - }, - { - 
"fields": [ - { - "mode": "REQUIRED", - "name": "amount", - "type": "INTEGER" - }, - { - "mode": "REQUIRED", - "name": "currency", - "type": "STRING" - } - ], - "mode": "REQUIRED", - "name": "price", - "type": "RECORD" - }, - { - "fields": [ - { - "mode": "REQUIRED", - "name": "key", - "type": "STRING" - }, - { - "mode": "REQUIRED", - "name": "value", - "type": "STRING" - } - ], - "mode": "REPEATED", - "name": "properties", - "type": "RECORD" - }, - { - "fields": [ - { - "mode": "REQUIRED", - "name": "key", - "type": "STRING" - }, - { - "mode": "NULLABLE", - "name": "value", - "type": "STRING" - } - ], - "mode": "REPEATED", - "name": "presentation", - "type": "RECORD" - } - ], - "mode": "REQUIRED", - "name": "product", - "type": "RECORD" - } -] diff --git a/prime/infra/raw_purchases_schema.ddl b/prime/infra/raw_purchases_schema.ddl new file mode 100644 index 000000000..dabb4fd95 --- /dev/null +++ b/prime/infra/raw_purchases_schema.ddl @@ -0,0 +1,23 @@ + CREATE TABLE purchases.raw_purchases + ( + id STRING NOT NULL, + subscriberId STRING NOT NULL, + timestamp INT64 NOT NULL, + status STRING NOT NULL, + product STRUCT< + sku STRING NOT NULL, + price STRUCT< + amount INT64 NOT NULL, + currency STRING NOT NULL + > NOT NULL, + properties ARRAY< STRUCT< + key STRING NOT NULL, + value STRING NOT NULL + > >, + presentation ARRAY< STRUCT< + key STRING NOT NULL, + value STRING NOT NULL + > > + > NOT NULL +) +PARTITION BY DATE(_PARTITIONTIME) From d79191d6695385408e888c873f168a5bd1253ccb Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Fri, 7 Sep 2018 16:34:09 +0200 Subject: [PATCH 41/78] Bring back the `purchase record push to Pub/Sub` This was removed as part of a merge. 
--- .../org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt | 4 ++++ .../kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt | 5 +++++ 2 files changed, 9 insertions(+) diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt index f9a018c7e..7f5c69e4e 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt @@ -216,6 +216,10 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu } // Notify OCS .flatMap { + analyticsReporter.reportPurchaseInfo( + purchaseRecord = purchaseRecord, + subscriberId = subscriberId, + status = "success") //TODO: Handle errors (when it becomes available) ocsSubscriberService.topup(subscriberId, sku) // TODO vihang: handle currency conversion diff --git a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt index 95ab64649..b05734fe7 100644 --- a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt +++ b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt @@ -401,6 +401,11 @@ object Neo4jStoreSingleton : GraphStore { } // Notify OCS .flatMap { + //TODO: While aborting transactions, send a record with "reverted" status + analyticsReporter.reportPurchaseInfo( + purchaseRecord = purchaseRecord, + subscriberId = subscriberId, + status = "success") //TODO vihang: Handle errors (when it becomes available) ocs.topup(subscriberId, sku) // TODO vihang: handle currency conversion From e9042bc543e476d4c8848ee421ddc62b0fa5a06f Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Fri, 7 Sep 2018 18:16:59 +0200 Subject: [PATCH 42/78] Fix the JSON syntax --- .../analytics/publishers/PurchaseInfoPublisher.kt | 12 ++++++++---- 1 file 
changed, 8 insertions(+), 4 deletions(-) diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt index 376494564..ca2638107 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt @@ -12,6 +12,8 @@ import com.google.pubsub.v1.PubsubMessage import io.dropwizard.lifecycle.Managed import org.ostelco.prime.analytics.ConfigRegistry import org.ostelco.prime.logger +import org.ostelco.prime.model.Price +import org.ostelco.prime.model.Product import org.ostelco.prime.model.PurchaseRecord import org.ostelco.prime.model.PurchaseRecordInfo import org.ostelco.prime.module.getResource @@ -48,10 +50,12 @@ object PurchaseInfoPublisher : Managed { publisher.shutdown() } - private fun createGson(): Gson { + internal fun createGson(): Gson { val builder = GsonBuilder() - val mapType = object : TypeToken>() {}.type - val serializer = JsonSerializer> { src, _, _ -> + // Type for this conversion is explicitly set to java.util.Map + // This is needed because of kotlin's own Map interface + val mapType = object : TypeToken>() {}.type + val serializer = JsonSerializer> { src, _, _ -> val array = JsonArray() src.forEach { k, v -> val property = JsonObject() @@ -65,7 +69,7 @@ object PurchaseInfoPublisher : Managed { return builder.create() } - private fun convertToJson(purchaseRecordInfo: PurchaseRecordInfo): ByteString = + fun convertToJson(purchaseRecordInfo: PurchaseRecordInfo): ByteString = ByteString.copyFromUtf8(gson.toJson(purchaseRecordInfo)) From 4808dcbaaeeb31f12846998eee162161df20fe06 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Sat, 8 Sep 2018 09:09:44 +0200 Subject: [PATCH 43/78] Use the pubsub emulator while doing acceptance tests --- 
analytics-module/build.gradle | 2 ++ .../DataConsumptionInfoPublisher.kt | 20 +++++++++++++++--- .../publishers/PurchaseInfoPublisher.kt | 21 +++++++++++++++---- docker-compose.override.yaml | 1 - prime/script/wait.sh | 1 + 5 files changed, 37 insertions(+), 8 deletions(-) diff --git a/analytics-module/build.gradle b/analytics-module/build.gradle index a14bb9719..b4fdde847 100644 --- a/analytics-module/build.gradle +++ b/analytics-module/build.gradle @@ -10,6 +10,8 @@ dependencies { implementation "io.dropwizard:dropwizard-core:$dropwizardVersion" implementation "com.google.cloud:google-cloud-pubsub:$googleCloudVersion" implementation 'com.google.code.gson:gson:2.8.5' + //compile group: 'com.google.api', name: 'gax-grpc', version: '0.14.0' + testCompile group: 'com.google.api', name: 'gax-grpc', version: '1.30.0' testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" testImplementation 'org.mockito:mockito-core:2.18.3' diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt index dd5a4ac1a..16113fb62 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt @@ -2,12 +2,16 @@ package org.ostelco.prime.analytics.publishers import com.google.api.core.ApiFutureCallback import com.google.api.core.ApiFutures +import com.google.api.gax.core.NoCredentialsProvider +import com.google.api.gax.grpc.GrpcTransportChannel import com.google.api.gax.rpc.ApiException +import com.google.api.gax.rpc.FixedTransportChannelProvider import com.google.cloud.pubsub.v1.Publisher import com.google.protobuf.util.Timestamps import com.google.pubsub.v1.ProjectTopicName import com.google.pubsub.v1.PubsubMessage import io.dropwizard.lifecycle.Managed 
+import io.grpc.ManagedChannelBuilder import org.ostelco.analytics.api.DataTrafficInfo import org.ostelco.prime.analytics.ConfigRegistry.config import org.ostelco.prime.logger @@ -29,11 +33,21 @@ object DataConsumptionInfoPublisher : Managed { @Throws(IOException::class) override fun start() { - val topicName = ProjectTopicName.of(config.projectId, config.dataTrafficTopicId) + val hostport = System.getenv("PUBSUB_EMULATOR_HOST") + if (!hostport.isNullOrEmpty()) { + val channel = ManagedChannelBuilder.forTarget(hostport).usePlaintext(true).build() + // Create a publisher instance with default settings bound to the topic + val channelProvider = FixedTransportChannelProvider.create(GrpcTransportChannel.create(channel)) + val credentialsProvider = NoCredentialsProvider() + publisher = Publisher.newBuilder(topicName) + .setChannelProvider(channelProvider) + .setCredentialsProvider(credentialsProvider) + .build(); + } else { + publisher = Publisher.newBuilder(topicName).build() + } - // Create a publisher instance with default settings bound to the topic - publisher = Publisher.newBuilder(topicName).build() } @Throws(Exception::class) diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt index ca2638107..c586ef7da 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt @@ -2,7 +2,10 @@ package org.ostelco.prime.analytics.publishers import com.google.api.core.ApiFutureCallback import com.google.api.core.ApiFutures +import com.google.api.gax.core.NoCredentialsProvider +import com.google.api.gax.grpc.GrpcTransportChannel import com.google.api.gax.rpc.ApiException +import com.google.api.gax.rpc.FixedTransportChannelProvider import com.google.cloud.pubsub.v1.Publisher 
import com.google.gson.* import com.google.gson.reflect.TypeToken @@ -10,10 +13,9 @@ import com.google.protobuf.ByteString import com.google.pubsub.v1.ProjectTopicName import com.google.pubsub.v1.PubsubMessage import io.dropwizard.lifecycle.Managed +import io.grpc.ManagedChannelBuilder import org.ostelco.prime.analytics.ConfigRegistry import org.ostelco.prime.logger -import org.ostelco.prime.model.Price -import org.ostelco.prime.model.Product import org.ostelco.prime.model.PurchaseRecord import org.ostelco.prime.model.PurchaseRecordInfo import org.ostelco.prime.module.getResource @@ -39,9 +41,20 @@ object PurchaseInfoPublisher : Managed { override fun start() { val topicName = ProjectTopicName.of(ConfigRegistry.config.projectId, ConfigRegistry.config.purchaseInfoTopicId) + val hostport = System.getenv("PUBSUB_EMULATOR_HOST") + if (!hostport.isNullOrEmpty()) { + val channel = ManagedChannelBuilder.forTarget(hostport).usePlaintext(true).build() + // Create a publisher instance with default settings bound to the topic + val channelProvider = FixedTransportChannelProvider.create(GrpcTransportChannel.create(channel)) + val credentialsProvider = NoCredentialsProvider() + publisher = Publisher.newBuilder(topicName) + .setChannelProvider(channelProvider) + .setCredentialsProvider(credentialsProvider) + .build(); + } else { + publisher = Publisher.newBuilder(topicName).build() + } - // Create a publisher instance with default settings bound to the topic - publisher = Publisher.newBuilder(topicName).build() } @Throws(Exception::class) diff --git a/docker-compose.override.yaml b/docker-compose.override.yaml index f95c8f439..373413ffe 100644 --- a/docker-compose.override.yaml +++ b/docker-compose.override.yaml @@ -8,7 +8,6 @@ services: dockerfile: Dockerfile.test environment: - FIREBASE_ROOT_PATH=test - - GOOGLE_APPLICATION_CREDENTIALS=/secret/pantel-prod.json - PUBSUB_EMULATOR_HOST=pubsub-emulator:8085 - PUBSUB_PROJECT_ID=pantel-2decb - STRIPE_API_KEY=${STRIPE_API_KEY} diff 
--git a/prime/script/wait.sh b/prime/script/wait.sh index 3ed1947f0..b3bfd9d1e 100755 --- a/prime/script/wait.sh +++ b/prime/script/wait.sh @@ -39,6 +39,7 @@ curl -X PUT pubsub-emulator:8085/v1/projects/pantel-2decb/topics/data-traffic curl -X PUT pubsub-emulator:8085/v1/projects/pantel-2decb/topics/pseudo-traffic curl -X PUT -H "Content-Type: application/json" -d '{"topic":"projects/pantel-2decb/topics/data-traffic","ackDeadlineSeconds":10}' pubsub-emulator:8085/v1/projects/pantel-2decb/subscriptions/test-pseudo curl -X PUT pubsub-emulator:8085/v1/projects/pantel-2decb/topics/purchase-info +curl -X PUT -H "Content-Type: application/json" -d '{"topic":"projects/pantel-2decb/topics/purchase-info","ackDeadlineSeconds":10}' pubsub-emulator:8085/v1/projects/pantel-2decb/subscriptions/purchase-info-sub echo "Done creating topics and subscriptions" From 222b415a42748fad2be5260a9452fe1747650030 Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Fri, 7 Sep 2018 15:01:55 +0200 Subject: [PATCH 44/78] Neo4j backup and restore --- tools/neo4j-admin-tools/README.md | 44 ++++++++++++++ tools/neo4j-admin-tools/backup_dir/.gitignore | 1 + tools/neo4j-admin-tools/data_dir/.gitignore | 1 + .../docker-compose.backup.yaml | 16 +++++ .../docker-compose.neo4j.yaml | 13 ++++ .../docker-compose.restore.yaml | 16 +++++ .../org/ostelco/tools/migration/MainKt.kt | 60 +++++++++++++------ .../ostelco/tools/migration/Neo4jExporter.kt | 51 ++++++++++++++++ .../src/main/resources/.gitignore | 3 +- 9 files changed, 187 insertions(+), 18 deletions(-) create mode 100644 tools/neo4j-admin-tools/README.md create mode 100644 tools/neo4j-admin-tools/backup_dir/.gitignore create mode 100755 tools/neo4j-admin-tools/data_dir/.gitignore create mode 100644 tools/neo4j-admin-tools/docker-compose.backup.yaml create mode 100644 tools/neo4j-admin-tools/docker-compose.neo4j.yaml create mode 100644 tools/neo4j-admin-tools/docker-compose.restore.yaml create mode 100644 
tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/Neo4jExporter.kt diff --git a/tools/neo4j-admin-tools/README.md b/tools/neo4j-admin-tools/README.md new file mode 100644 index 000000000..bb2ae2821 --- /dev/null +++ b/tools/neo4j-admin-tools/README.md @@ -0,0 +1,44 @@ +# Neo4j Admin Tools + +## Online Backup + +This is keep the terminal running until `ctrl+C` +```bash +kubectl config use-context $(kubectl config get-contexts --output name | grep dev-cluster) +kubectl get pods +kubectl port-forward neo4j-core-0 6362:6362 7474:7474 7687:7687 +``` + +```bash +docker-compose -f docker-compose.backup.yaml up +``` + +Then stop port-forwarding terminal using `ctrl+C`. + +### Restore to local neo4j + +```bash +docker-compose -f docker-compose.restore.yaml up +``` + +### Export from local Neo4j to cypher script. + + * Run neo4j locally using restored data. +```bash +docker-compose -f docker-compose.neo4j.yaml up +``` + * Run tool with `neo4jExporterToCypherFile()`. The cypher file will be created at `src/main/resources/backup.cypher`. + +## Online dump to cypher script. + +This is keep the terminal running until `ctrl+C` + +```bash +kubectl config use-context $(kubectl config get-contexts --output name | grep dev-cluster) +kubectl get pods +kubectl port-forward neo4j-core-0 7474:7474 7687:7687 +``` + +Run tool with `neo4jExporterToCypherFile()`. The cypher file will be created at `src/main/resources/backup.cypher`. + +Then stop port-forwarding terminal using `ctrl+C`. 
\ No newline at end of file diff --git a/tools/neo4j-admin-tools/backup_dir/.gitignore b/tools/neo4j-admin-tools/backup_dir/.gitignore new file mode 100644 index 000000000..08fd690d1 --- /dev/null +++ b/tools/neo4j-admin-tools/backup_dir/.gitignore @@ -0,0 +1 @@ +graph.db-backup \ No newline at end of file diff --git a/tools/neo4j-admin-tools/data_dir/.gitignore b/tools/neo4j-admin-tools/data_dir/.gitignore new file mode 100755 index 000000000..45bb03f98 --- /dev/null +++ b/tools/neo4j-admin-tools/data_dir/.gitignore @@ -0,0 +1 @@ +databases \ No newline at end of file diff --git a/tools/neo4j-admin-tools/docker-compose.backup.yaml b/tools/neo4j-admin-tools/docker-compose.backup.yaml new file mode 100644 index 000000000..f659f058b --- /dev/null +++ b/tools/neo4j-admin-tools/docker-compose.backup.yaml @@ -0,0 +1,16 @@ +version: "3.7" + +services: + neo4j-online-backup: + container_name: neo4j-online-backup + image: neo4j:3.3.4-enterprise + command: > + bin/neo4j-admin backup + --backup-dir=/backup_dir + --name=graph.db-backup + --from=host.docker.internal + --cc-report-dir=/backup_dir + volumes: + - "./backup_dir:/backup_dir" + environment: + - NEO4J_ACCEPT_LICENSE_AGREEMENT=yes \ No newline at end of file diff --git a/tools/neo4j-admin-tools/docker-compose.neo4j.yaml b/tools/neo4j-admin-tools/docker-compose.neo4j.yaml new file mode 100644 index 000000000..e8eacb468 --- /dev/null +++ b/tools/neo4j-admin-tools/docker-compose.neo4j.yaml @@ -0,0 +1,13 @@ +version: "3.7" + +services: + neo4j: + container_name: "neo4j" + image: neo4j:3.4.4 + environment: + - NEO4J_AUTH=none + ports: + - "7687:7687" + - "7474:7474" + volumes: + - "./data_dir:/data" \ No newline at end of file diff --git a/tools/neo4j-admin-tools/docker-compose.restore.yaml b/tools/neo4j-admin-tools/docker-compose.restore.yaml new file mode 100644 index 000000000..be5183c34 --- /dev/null +++ b/tools/neo4j-admin-tools/docker-compose.restore.yaml @@ -0,0 +1,16 @@ +version: "3.7" + +services: + 
neo4j-online-restore: + container_name: neo4j-online-restore + image: neo4j:3.3.4-enterprise + command: > + bin/neo4j-admin restore + --from=/backup_dir/graph.db-backup + --database=graph.db + --force + volumes: + - "./backup_dir:/backup_dir" + - "./data_dir:/data" + environment: + - NEO4J_ACCEPT_LICENSE_AGREEMENT=yes \ No newline at end of file diff --git a/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/MainKt.kt b/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/MainKt.kt index 860a31246..1b4f54e12 100644 --- a/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/MainKt.kt +++ b/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/MainKt.kt @@ -1,53 +1,79 @@ package org.ostelco.tools.migration import org.neo4j.driver.v1.AccessMode +import java.nio.file.Files +import java.nio.file.Paths fun main(args: Array) { - cypherFileToNeo4jImporter() + neo4jExporterToCypherFile() } -fun cypherFileAndFirebaseToNeo4jMigration() { - initFirebase() +fun neo4jExporterToCypherFile() { Neo4jClient.init() - Neo4jClient.driver.session(AccessMode.WRITE).use { + Neo4jClient.driver.session(AccessMode.READ).use { session -> - val txn = it.beginTransaction() + val txn = session.beginTransaction() - println("Import from file to Neo4j") + println("Import from Neo4j to file") - importFromCypherFile("src/main/resources/init.cypher") { - query -> txn.run(query) + importFromNeo4j(txn) { str -> + Files.write(Paths.get("src/main/resources/backup.cypher"), str.toByteArray()) } - println("Exporting from firebase and import it to Neo4j") - importFromFirebase { - createQuery -> txn.run(createQuery) + println("Done") + txn.success() + } + + Neo4jClient.stop() +} + +fun cypherFileToNeo4jImporter() { + + Neo4jClient.init() + + Neo4jClient.driver.session(AccessMode.WRITE).use { session -> + + val txn = session.beginTransaction() + + println("Import from file to Neo4j") + + importFromCypherFile("src/main/resources/init.cypher") { query 
-> + txn.run(query) } println("Done") txn.success() } + Neo4jClient.stop() } -fun cypherFileToNeo4jImporter() { +fun cypherFileAndFirebaseToNeo4jMigration() { + initFirebase() Neo4jClient.init() - Neo4jClient.driver.session(AccessMode.WRITE).use { + Neo4jClient.driver.session(AccessMode.WRITE).use { session -> - val txn = it.beginTransaction() + val txn = session.beginTransaction() println("Import from file to Neo4j") - importFromCypherFile("src/main/resources/init.cypher") { - query -> txn.run(query) + importFromCypherFile("src/main/resources/init.cypher") { query -> + txn.run(query) + } + + println("Exporting from firebase and import it to Neo4j") + importFromFirebase { createQuery -> + txn.run(createQuery) } println("Done") txn.success() } + Neo4jClient.stop() -} \ No newline at end of file +} + diff --git a/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/Neo4jExporter.kt b/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/Neo4jExporter.kt new file mode 100644 index 000000000..60542b9b8 --- /dev/null +++ b/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/Neo4jExporter.kt @@ -0,0 +1,51 @@ +package org.ostelco.tools.migration + +import org.neo4j.driver.v1.Transaction + +fun importFromNeo4j(txn: Transaction, handleCypher: (String) -> Unit) { + + val sb = StringBuilder() + + run { + val stmtResult = txn.run("MATCH (n) RETURN n;") + stmtResult.forEach { record -> + val node = record["n"].asNode() + val labels = node.labels().joinToString(separator = "", prefix = ":") + + val props = node.asMap().map { entry -> + "`${entry.key}`: '${entry.value}'" + }.joinToString(separator = ",\n") + + sb.append("CREATE ($labels {$props});\n\n") + } + } + + run { + val stmtResult = txn.run("MATCH (n)-[r]->(m) RETURN n,r,m;") + stmtResult.forEach { record -> + val fromNode = record["n"].asNode() + val relation = record["r"].asRelationship() + val toNode = record["m"].asNode() + + val type = relation.type() + + var props = 
relation.asMap().map { entry -> + "`${entry.key}`: '${entry.value}'" + }.joinToString(separator = ",\n") + + props = if (props.isNotBlank()) { + " {$props}" + } else { + props + } + + sb.append( +""" +MATCH (n:${fromNode.labels().first()} {id: '${fromNode.get("id")}'}), (m:${toNode.labels().first()} {id: '${toNode.get("id")}'}) +CREATE (n)-[:$type$props]->(m); +""") + } + } + + handleCypher(sb.toString()) +} \ No newline at end of file diff --git a/tools/neo4j-admin-tools/src/main/resources/.gitignore b/tools/neo4j-admin-tools/src/main/resources/.gitignore index b329cc412..3cb2ee750 100644 --- a/tools/neo4j-admin-tools/src/main/resources/.gitignore +++ b/tools/neo4j-admin-tools/src/main/resources/.gitignore @@ -1,2 +1,3 @@ prod.cypher -test.cypher \ No newline at end of file +test.cypher +backup.cypher \ No newline at end of file From aff5ace7fa77cfad2bb7397df63963acf6d9adc2 Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Mon, 10 Sep 2018 10:46:06 +0200 Subject: [PATCH 45/78] Handle msccList correctly Extended acceptance test to handle the case where a user with no balance is trying to go online. 
And after getting no balance manually disconnecting and forcing a CCR-Terminate --- .../main/kotlin/org/ostelco/at/pgw/OcsTest.kt | 24 +++++++++++++++---- .../org/ostelco/diameter/test/TestHelper.kt | 7 ++++++ .../org/ostelco/prime/ocs/EventHandlerImpl.kt | 4 +--- 3 files changed, 28 insertions(+), 7 deletions(-) diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/pgw/OcsTest.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/pgw/OcsTest.kt index 29297d83f..5e21982ac 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/pgw/OcsTest.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/pgw/OcsTest.kt @@ -150,7 +150,7 @@ class OcsTest { @Test - fun creditControlRequestInitNoCredit() { + fun creditControlRequestInitTerminateNoCredit() { val client = testClient ?: fail("Test client is null") @@ -181,8 +181,6 @@ class OcsTest { } // There is 2 step in graceful shutdown. First OCS send terminate, then P-GW report used units in a final update - assertEquals(INITIAL_BALANCE, getBalance(), message = "Incorrect balance after init using wrong msisdn") - val updateRequest = client.createRequest( DEST_REALM, DEST_HOST, @@ -208,7 +206,25 @@ class OcsTest { assertEquals(86400L, validTime.unsigned32) } - assertEquals(INITIAL_BALANCE, getBalance(), message = "Incorrect balance after update using wrong msisdn") + // Last step is user disconnecting connection forcing a terminate + val terminateRequest = client.createRequest( + DEST_REALM, + DEST_HOST, + session + ) ?: fail("Failed to create request") + TestHelper.createTerminateRequest(terminateRequest.avps, "4333333333") + + client.sendNextRequest(terminateRequest, session) + + waitForAnswer() + + run { + assertEquals(2001L, client.resultCodeAvp?.integer32?.toLong()) + val resultAvps = client.resultAvps ?: fail("Missing AVPs") + assertEquals(DEST_HOST, resultAvps.getAvp(Avp.ORIGIN_HOST).utF8String) + assertEquals(DEST_REALM, resultAvps.getAvp(Avp.ORIGIN_REALM).utF8String) + 
assertEquals(RequestType.TERMINATION_REQUEST.toLong(), resultAvps.getAvp(Avp.CC_REQUEST_TYPE).integer32.toLong()) + } } diff --git a/diameter-test/src/main/kotlin/org/ostelco/diameter/test/TestHelper.kt b/diameter-test/src/main/kotlin/org/ostelco/diameter/test/TestHelper.kt index ddaeb83a2..8b0bb8dde 100644 --- a/diameter-test/src/main/kotlin/org/ostelco/diameter/test/TestHelper.kt +++ b/diameter-test/src/main/kotlin/org/ostelco/diameter/test/TestHelper.kt @@ -153,4 +153,11 @@ object TestHelper { addTerminateRequest(ccrAvps, ratingGroup = 10, serviceIdentifier = 1, bucketSize = bucketSize) addServiceInformation(ccrAvps, apn = APN, sgsnMncMcc = SGSN_MCC_MNC) } + + @JvmStatic + fun createTerminateRequest(ccrAvps: AvpSet, msisdn: String) { + buildBasicRequest(ccrAvps, RequestType.TERMINATION_REQUEST, requestNumber = 2) + addUser(ccrAvps, msisdn = msisdn, imsi = IMSI) + addServiceInformation(ccrAvps, apn = APN, sgsnMncMcc = SGSN_MCC_MNC) + } } \ No newline at end of file diff --git a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt index 42f34ec18..a7f2ba282 100644 --- a/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt +++ b/ocs/src/main/kotlin/org/ostelco/prime/ocs/EventHandlerImpl.kt @@ -75,9 +75,7 @@ internal class EventHandlerImpl(private val ocsService: OcsService) : EventHandl .setMsisdn(event.msisdn) event.request?.let { request -> - // This is a hack to know when we have received an MSCC in the request or not. - // For Terminate request we might not have any MSCC and therefore no serviceIdentifier. 
- if (request.getMscc(0).serviceIdentifier > 0) { + if (request.msccCount > 0) { val msccBuilder = MultipleServiceCreditControl.newBuilder() msccBuilder.setServiceIdentifier(request.getMscc(0).serviceIdentifier) .setRatingGroup(request.getMscc(0).ratingGroup) From efd90b459876dbce003012a290eb84162c1e7f85 Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Sun, 9 Sep 2018 11:56:39 +0200 Subject: [PATCH 46/78] Moved common publisher code to delegate class --- .../DataConsumptionInfoPublisher.kt | 46 ++-------------- .../publishers/DelegatePubSubPublisher.kt | 50 +++++++++++++++++ .../analytics/publishers/PubSubPublisher.kt | 11 ++++ .../publishers/PurchaseInfoPublisher.kt | 55 ++++--------------- 4 files changed, 79 insertions(+), 83 deletions(-) create mode 100644 analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DelegatePubSubPublisher.kt create mode 100644 analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PubSubPublisher.kt diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt index 16113fb62..0ea3850af 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DataConsumptionInfoPublisher.kt @@ -2,66 +2,32 @@ package org.ostelco.prime.analytics.publishers import com.google.api.core.ApiFutureCallback import com.google.api.core.ApiFutures -import com.google.api.gax.core.NoCredentialsProvider -import com.google.api.gax.grpc.GrpcTransportChannel import com.google.api.gax.rpc.ApiException -import com.google.api.gax.rpc.FixedTransportChannelProvider -import com.google.cloud.pubsub.v1.Publisher import com.google.protobuf.util.Timestamps -import com.google.pubsub.v1.ProjectTopicName import com.google.pubsub.v1.PubsubMessage 
-import io.dropwizard.lifecycle.Managed -import io.grpc.ManagedChannelBuilder import org.ostelco.analytics.api.DataTrafficInfo -import org.ostelco.prime.analytics.ConfigRegistry.config +import org.ostelco.prime.analytics.ConfigRegistry import org.ostelco.prime.logger import org.ostelco.prime.module.getResource import org.ostelco.prime.pseudonymizer.PseudonymizerService -import java.io.IOException import java.time.Instant /** * This class publishes the data consumption information events to the Google Cloud Pub/Sub. */ -object DataConsumptionInfoPublisher : Managed { +object DataConsumptionInfoPublisher : + PubSubPublisher by DelegatePubSubPublisher(topicId = ConfigRegistry.config.dataTrafficTopicId) { private val logger by logger() private val pseudonymizerService by lazy { getResource() } - private lateinit var publisher: Publisher - - @Throws(IOException::class) - override fun start() { - val topicName = ProjectTopicName.of(config.projectId, config.dataTrafficTopicId) - val hostport = System.getenv("PUBSUB_EMULATOR_HOST") - if (!hostport.isNullOrEmpty()) { - val channel = ManagedChannelBuilder.forTarget(hostport).usePlaintext(true).build() - // Create a publisher instance with default settings bound to the topic - val channelProvider = FixedTransportChannelProvider.create(GrpcTransportChannel.create(channel)) - val credentialsProvider = NoCredentialsProvider() - publisher = Publisher.newBuilder(topicName) - .setChannelProvider(channelProvider) - .setCredentialsProvider(credentialsProvider) - .build(); - } else { - publisher = Publisher.newBuilder(topicName).build() - } - - } - - @Throws(Exception::class) - override fun stop() { - // When finished with the publisher, shutdown to free up resources. 
- publisher.shutdown() - } - fun publish(msisdn: String, usedBucketBytes: Long, bundleBytes: Long) { if (usedBucketBytes == 0L) { return } - + val now = Instant.now().toEpochMilli() val pseudonym = pseudonymizerService.getMsisdnPseudonym(msisdn, now).pseudonym @@ -78,7 +44,7 @@ object DataConsumptionInfoPublisher : Managed { .build() //schedule a message to be published, messages are automatically batched - val future = publisher.publish(pubsubMessage) + val future = publishPubSubMessage(pubsubMessage) // add an asynchronous callback to handle success / failure ApiFutures.addCallback(future, object : ApiFutureCallback { @@ -96,6 +62,6 @@ object DataConsumptionInfoPublisher : Managed { // Once published, returns server-assigned message ids (unique within the topic) logger.debug("Published message $messageId") } - }) + }, singleThreadScheduledExecutor) } } diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DelegatePubSubPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DelegatePubSubPublisher.kt new file mode 100644 index 000000000..7db538c90 --- /dev/null +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/DelegatePubSubPublisher.kt @@ -0,0 +1,50 @@ +package org.ostelco.prime.analytics.publishers + +import com.google.api.core.ApiFuture +import com.google.api.gax.core.NoCredentialsProvider +import com.google.api.gax.grpc.GrpcTransportChannel +import com.google.api.gax.rpc.FixedTransportChannelProvider +import com.google.cloud.pubsub.v1.Publisher +import com.google.pubsub.v1.ProjectTopicName +import com.google.pubsub.v1.PubsubMessage +import io.grpc.ManagedChannelBuilder +import org.ostelco.prime.analytics.ConfigRegistry +import java.util.concurrent.Executors +import java.util.concurrent.ScheduledExecutorService + +class DelegatePubSubPublisher( + private val topicId: String, + private val projectId: String = ConfigRegistry.config.projectId) : PubSubPublisher { + + private 
lateinit var publisher: Publisher + + override lateinit var singleThreadScheduledExecutor: ScheduledExecutorService + + override fun start() { + + singleThreadScheduledExecutor = Executors.newSingleThreadScheduledExecutor() + + val topicName = ProjectTopicName.of(projectId, topicId) + val strSocketAddress = System.getenv("PUBSUB_EMULATOR_HOST") + publisher = if (!strSocketAddress.isNullOrEmpty()) { + val channel = ManagedChannelBuilder.forTarget(strSocketAddress).usePlaintext().build() + // Create a publisher instance with default settings bound to the topic + val channelProvider = FixedTransportChannelProvider.create(GrpcTransportChannel.create(channel)) + val credentialsProvider = NoCredentialsProvider() + Publisher.newBuilder(topicName) + .setChannelProvider(channelProvider) + .setCredentialsProvider(credentialsProvider) + .build(); + } else { + Publisher.newBuilder(topicName).build() + } + } + + override fun stop() { + // When finished with the publisher, shutdown to free up resources. 
+ publisher.shutdown() + singleThreadScheduledExecutor.shutdown() + } + + override fun publishPubSubMessage(pubsubMessage: PubsubMessage): ApiFuture = publisher.publish(pubsubMessage) +} \ No newline at end of file diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PubSubPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PubSubPublisher.kt new file mode 100644 index 000000000..16c7d648d --- /dev/null +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PubSubPublisher.kt @@ -0,0 +1,11 @@ +package org.ostelco.prime.analytics.publishers + +import com.google.api.core.ApiFuture +import com.google.pubsub.v1.PubsubMessage +import io.dropwizard.lifecycle.Managed +import java.util.concurrent.ScheduledExecutorService + +interface PubSubPublisher : Managed { + var singleThreadScheduledExecutor: ScheduledExecutorService + fun publishPubSubMessage(pubsubMessage: PubsubMessage): ApiFuture +} \ No newline at end of file diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt index c586ef7da..ac8abe10a 100644 --- a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/publishers/PurchaseInfoPublisher.kt @@ -2,32 +2,29 @@ package org.ostelco.prime.analytics.publishers import com.google.api.core.ApiFutureCallback import com.google.api.core.ApiFutures -import com.google.api.gax.core.NoCredentialsProvider -import com.google.api.gax.grpc.GrpcTransportChannel import com.google.api.gax.rpc.ApiException -import com.google.api.gax.rpc.FixedTransportChannelProvider -import com.google.cloud.pubsub.v1.Publisher -import com.google.gson.* +import com.google.gson.Gson +import com.google.gson.GsonBuilder +import com.google.gson.JsonArray 
+import com.google.gson.JsonObject +import com.google.gson.JsonSerializer import com.google.gson.reflect.TypeToken import com.google.protobuf.ByteString -import com.google.pubsub.v1.ProjectTopicName import com.google.pubsub.v1.PubsubMessage -import io.dropwizard.lifecycle.Managed -import io.grpc.ManagedChannelBuilder import org.ostelco.prime.analytics.ConfigRegistry import org.ostelco.prime.logger import org.ostelco.prime.model.PurchaseRecord import org.ostelco.prime.model.PurchaseRecordInfo import org.ostelco.prime.module.getResource import org.ostelco.prime.pseudonymizer.PseudonymizerService -import java.io.IOException import java.net.URLEncoder /** * This class publishes the purchase information events to the Google Cloud Pub/Sub. */ -object PurchaseInfoPublisher : Managed { +object PurchaseInfoPublisher : + PubSubPublisher by DelegatePubSubPublisher(topicId = ConfigRegistry.config.purchaseInfoTopicId) { private val logger by logger() @@ -35,35 +32,7 @@ object PurchaseInfoPublisher : Managed { private var gson: Gson = createGson() - private lateinit var publisher: Publisher - - @Throws(IOException::class) - override fun start() { - - val topicName = ProjectTopicName.of(ConfigRegistry.config.projectId, ConfigRegistry.config.purchaseInfoTopicId) - val hostport = System.getenv("PUBSUB_EMULATOR_HOST") - if (!hostport.isNullOrEmpty()) { - val channel = ManagedChannelBuilder.forTarget(hostport).usePlaintext(true).build() - // Create a publisher instance with default settings bound to the topic - val channelProvider = FixedTransportChannelProvider.create(GrpcTransportChannel.create(channel)) - val credentialsProvider = NoCredentialsProvider() - publisher = Publisher.newBuilder(topicName) - .setChannelProvider(channelProvider) - .setCredentialsProvider(credentialsProvider) - .build(); - } else { - publisher = Publisher.newBuilder(topicName).build() - } - - } - - @Throws(Exception::class) - override fun stop() { - // When finished with the publisher, shutdown to free up 
resources. - publisher.shutdown() - } - - internal fun createGson(): Gson { + private fun createGson(): Gson { val builder = GsonBuilder() // Type for this conversion is explicitly set to java.util.Map // This is needed because of kotlin's own Map interface @@ -82,13 +51,13 @@ object PurchaseInfoPublisher : Managed { return builder.create() } - fun convertToJson(purchaseRecordInfo: PurchaseRecordInfo): ByteString = + private fun convertToJson(purchaseRecordInfo: PurchaseRecordInfo): ByteString = ByteString.copyFromUtf8(gson.toJson(purchaseRecordInfo)) fun publish(purchaseRecord: PurchaseRecord, subscriberId: String, status: String) { - val encodedSubscriberId = URLEncoder.encode(subscriberId,"UTF-8") + val encodedSubscriberId = URLEncoder.encode(subscriberId, "UTF-8") val pseudonym = pseudonymizerService.getSubscriberIdPseudonym(encodedSubscriberId, purchaseRecord.timestamp).pseudonym val pubsubMessage = PubsubMessage.newBuilder() @@ -96,7 +65,7 @@ object PurchaseInfoPublisher : Managed { .build() //schedule a message to be published, messages are automatically batched - val future = publisher.publish(pubsubMessage) + val future = publishPubSubMessage(pubsubMessage) // add an asynchronous callback to handle success / failure ApiFutures.addCallback(future, object : ApiFutureCallback { @@ -114,6 +83,6 @@ object PurchaseInfoPublisher : Managed { // Once published, returns server-assigned message ids (unique within the topic) logger.debug(messageId) } - }) + }, singleThreadScheduledExecutor) } } From 7e53828b05f730321eeff35f00b31c2c9ed47e7e Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Sun, 9 Sep 2018 14:17:41 +0200 Subject: [PATCH 47/78] Tried swagger codegen for kotlin (again). 
--- prime-client-api/build.gradle | 35 +++++++++++++++++++++-------------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/prime-client-api/build.gradle b/prime-client-api/build.gradle index 9bbc9c03d..d78bb4bd1 100644 --- a/prime-client-api/build.gradle +++ b/prime-client-api/build.gradle @@ -7,22 +7,12 @@ plugins { // gradle generateSwaggerCode swaggerSources { - - // if they ever fix kotlin -// 'kotlin-client' { -// inputFile = file("${projectDir}/../prime/infra/prod/prime-client-api.yaml") -// code { -// language = 'kotlin' -// configFile = file("${projectDir}/config.json") -// } -// } 'java-client' { inputFile = file("${projectDir}/../prime/infra/dev/prime-client-api.yaml") code { language = 'java' configFile = file("${projectDir}/config.json") - // for creating only model - // components = ["models"] +// components = ["models"] } } } @@ -31,22 +21,39 @@ compileJava.dependsOn swaggerSources.'java-client'.code sourceSets.main.java.srcDir "${swaggerSources.'java-client'.code.outputDir}/src/main/java" sourceSets.main.resources.srcDir "${swaggerSources.'java-client'.code.outputDir}/src/main/resources" +// if they ever fix kotlin +//swaggerSources { +// 'kotlin-client' { +// inputFile = file("${projectDir}/../prime/infra/prod/prime-client-api.yaml") +// code { +// language = 'kotlin' +// configFile = file("${projectDir}/config.json") +// } +// } +//} +// +//compileKotlin.dependsOn swaggerSources.'kotlin-client'.code +//sourceSets.main.kotlin.srcDir "${swaggerSources.'kotlin-client'.code.outputDir}/src/main/java" +//sourceSets.main.resources.srcDir "${swaggerSources.'kotlin-client'.code.outputDir}/src/main/resources" + dependencies { implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion" swaggerCodegen 'io.swagger:swagger-codegen-cli:2.3.1' // taken from build/swagger-code-java-client/build.gradle - // for model implementation 'io.swagger:swagger-annotations:1.5.15' implementation 'com.google.code.gson:gson:2.8.5' - - // for all 
implementation 'com.squareup.okhttp:okhttp:2.7.5' implementation 'com.squareup.okhttp:logging-interceptor:2.7.5' implementation 'io.gsonfire:gson-fire:1.8.3' implementation 'org.threeten:threetenbp:1.3.7' testImplementation 'junit:junit:4.12' + + // taken from build/swagger-code-kotlin-client/build.gradle +// implementation "com.squareup.okhttp3:okhttp:3.8.0" +// implementation "com.squareup.moshi:moshi-kotlin:1.5.0" +// implementation "com.squareup.moshi:moshi-adapters:1.5.0" } idea { From 64fbd34d88a1e8d0a5c73ae304df175e216b840e Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Sun, 9 Sep 2018 17:53:12 +0200 Subject: [PATCH 48/78] Updated versions --- acceptance-tests/build.gradle | 2 +- analytics-module/build.gradle | 4 ++-- build.gradle | 6 +++++- client-api/build.gradle | 17 +++++++---------- .../prime/client/api/auth/GetUserInfoTest.kt | 2 +- neo4j-store/build.gradle | 2 +- ocsgw/build.gradle | 10 ++++++---- ostelco-lib/build.gradle | 4 ++-- payment-processor/build.gradle | 2 +- prime/build.gradle | 2 +- pseudonym-server/build.gradle | 2 +- 11 files changed, 28 insertions(+), 25 deletions(-) diff --git a/acceptance-tests/build.gradle b/acceptance-tests/build.gradle index a8074a553..312e6a21b 100644 --- a/acceptance-tests/build.gradle +++ b/acceptance-tests/build.gradle @@ -16,7 +16,7 @@ dependencies { implementation 'com.google.firebase:firebase-admin:6.4.0' - implementation "com.stripe:stripe-java:6.8.0" + implementation "com.stripe:stripe-java:$stripeVersion" implementation 'io.jsonwebtoken:jjwt:0.9.1' // tests fail when updated to 2.27 implementation "org.glassfish.jersey.media:jersey-media-json-jackson:2.25.1" diff --git a/analytics-module/build.gradle b/analytics-module/build.gradle index b4fdde847..74bea1607 100644 --- a/analytics-module/build.gradle +++ b/analytics-module/build.gradle @@ -14,8 +14,8 @@ dependencies { testCompile group: 'com.google.api', name: 'gax-grpc', version: '1.30.0' testImplementation 
"io.dropwizard:dropwizard-testing:$dropwizardVersion" - testImplementation 'org.mockito:mockito-core:2.18.3' - testImplementation 'org.assertj:assertj-core:3.10.0' + testImplementation "org.mockito:mockito-core:$mockitoVersion" + testImplementation "org.assertj:assertj-core:$assertJVersion" } apply from: '../jacoco.gradle' diff --git a/build.gradle b/build.gradle index e890c8522..9e0df77e7 100644 --- a/build.gradle +++ b/build.gradle @@ -30,8 +30,12 @@ subprojects { ext { kotlinVersion = "1.2.61" dropwizardVersion = "1.3.5" - googleCloudVersion = "1.41.0" + googleCloudVersion = "1.43.0" jacksonVersion = "2.9.6" + stripeVersion = "6.12.0" + guavaVersion = "26.0-jre" + assertJVersion = "3.11.1" + mockitoVersion = "2.21.0" } } diff --git a/client-api/build.gradle b/client-api/build.gradle index c41c249d3..1f355da83 100644 --- a/client-api/build.gradle +++ b/client-api/build.gradle @@ -11,25 +11,22 @@ dependencies { implementation "io.dropwizard:dropwizard-auth:$dropwizardVersion" implementation "io.dropwizard:dropwizard-client:$dropwizardVersion" - implementation 'com.google.guava:guava:25.1-jre' + implementation "com.google.guava:guava:$guavaVersion" implementation 'io.jsonwebtoken:jjwt:0.9.1' testImplementation "io.dropwizard:dropwizard-client:$dropwizardVersion" testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" testImplementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" - testImplementation "org.mockito:mockito-core:2.18.3" - testImplementation 'org.assertj:assertj-core:3.10.0' - - // from filter - // https://mvnrepository.com/artifact/org.glassfish.jersey.test-framework.providers/jersey-test-framework-provider-grizzly2 - // Updating from 2.25.1 to 2.27 causes error - testCompile (group: 'org.glassfish.jersey.test-framework.providers', name: 'jersey-test-framework-provider-grizzly2', version: '2.25.1') { - // 2.26 (latest) + testImplementation "org.mockito:mockito-core:$mockitoVersion" + testImplementation 
"org.assertj:assertj-core:$assertJVersion" + + testImplementation (group: 'org.glassfish.jersey.test-framework.providers', name: 'jersey-test-framework-provider-grizzly2', version: '2.25.1') { + because "Updating from 2.25.1 to 2.27.1 causes error. Keep the version matched with 'jersey-server' version from dropwizard." exclude group: 'javax.servlet', module: 'javax.servlet-api' exclude group: 'junit', module: 'junit' } - testCompile "com.nhaarman:mockito-kotlin:1.6.0" + testImplementation "com.nhaarman:mockito-kotlin:1.6.0" } tasks.withType(org.jetbrains.kotlin.gradle.tasks.KotlinCompile).all { diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/auth/GetUserInfoTest.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/auth/GetUserInfoTest.kt index ff497953a..b8bd22a42 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/auth/GetUserInfoTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/auth/GetUserInfoTest.kt @@ -71,7 +71,7 @@ class GetUserInfoTest { } if (counter == 0) { - fail("Couldn't connect to RULE server") + fail("Couldn't connect to RULE server") } } diff --git a/neo4j-store/build.gradle b/neo4j-store/build.gradle index 8ebf3be0e..a3db2a884 100644 --- a/neo4j-store/build.gradle +++ b/neo4j-store/build.gradle @@ -32,7 +32,7 @@ dependencies { testImplementation "org.jetbrains.kotlin:kotlin-test:$kotlinVersion" testImplementation "org.jetbrains.kotlin:kotlin-test-junit:$kotlinVersion" - testImplementation "org.mockito:mockito-core:2.18.3" + testImplementation "org.mockito:mockito-core:$mockitoVersion" } apply from: '../jacoco.gradle' \ No newline at end of file diff --git a/ocsgw/build.gradle b/ocsgw/build.gradle index 401cf1c8c..0ce19f6f4 100644 --- a/ocsgw/build.gradle +++ b/ocsgw/build.gradle @@ -3,6 +3,8 @@ plugins { id "com.github.johnrengelman.shadow" version "2.0.4" } +ext.junit5Version = "5.3.0" + dependencies { implementation project(':ocs-grpc-api') implementation 
project(':analytics-grpc-api') @@ -13,14 +15,14 @@ dependencies { implementation 'ch.qos.logback:logback-classic:1.2.3' // log to gcp stack-driver - implementation 'com.google.cloud:google-cloud-logging-logback:0.59.0-alpha' + implementation 'com.google.cloud:google-cloud-logging-logback:0.61.0-alpha' testImplementation project(':diameter-test') - testImplementation 'org.junit.jupiter:junit-jupiter-api:5.2.0' - testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.2.0' + testImplementation "org.junit.jupiter:junit-jupiter-api:$junit5Version" + testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:$junit5Version" testImplementation 'junit:junit:4.12' - testRuntimeOnly 'org.junit.vintage:junit-vintage-engine:5.2.0' + testRuntimeOnly "org.junit.vintage:junit-vintage-engine:$junit5Version" } test { diff --git a/ostelco-lib/build.gradle b/ostelco-lib/build.gradle index a320261f4..3c54eb8da 100644 --- a/ostelco-lib/build.gradle +++ b/ostelco-lib/build.gradle @@ -9,11 +9,11 @@ dependencies { // Match netty via ocs-api implementation 'com.google.firebase:firebase-admin:6.4.0' implementation 'com.lmax:disruptor:3.4.2' - implementation 'com.google.guava:guava:25.1-jre' + implementation "com.google.guava:guava:$guavaVersion" testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" testImplementation "org.mockito:mockito-core:2.18.3" - testImplementation 'org.assertj:assertj-core:3.10.0' + testImplementation "org.assertj:assertj-core:$assertJVersion" // https://mvnrepository.com/artifact/org.glassfish.jersey.test-framework.providers/jersey-test-framework-provider-grizzly2 testCompile("org.glassfish.jersey.test-framework.providers:jersey-test-framework-provider-grizzly2:2.27") { diff --git a/payment-processor/build.gradle b/payment-processor/build.gradle index 727e5e3e0..5d018028a 100644 --- a/payment-processor/build.gradle +++ b/payment-processor/build.gradle @@ -21,7 +21,7 @@ dependencies { implementation project(":prime-api") - implementation 
"com.stripe:stripe-java:6.8.0" + implementation "com.stripe:stripe-java:$stripeVersion" testImplementation "org.jetbrains.kotlin:kotlin-test:$kotlinVersion" testImplementation "org.jetbrains.kotlin:kotlin-test-junit:$kotlinVersion" diff --git a/prime/build.gradle b/prime/build.gradle index a283a96f8..d7f591f40 100644 --- a/prime/build.gradle +++ b/prime/build.gradle @@ -44,7 +44,7 @@ dependencies { implementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" implementation "io.dropwizard:dropwizard-http2:$dropwizardVersion" runtimeOnly "io.dropwizard:dropwizard-json-logging:$dropwizardVersion" - implementation 'com.google.guava:guava:25.1-jre' + implementation "com.google.guava:guava:$guavaVersion" implementation 'org.dhatim:dropwizard-prometheus:2.2.0' testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" diff --git a/pseudonym-server/build.gradle b/pseudonym-server/build.gradle index 6f5af1d2f..926ea958c 100644 --- a/pseudonym-server/build.gradle +++ b/pseudonym-server/build.gradle @@ -12,7 +12,7 @@ dependencies { implementation project(':analytics-grpc-api') implementation "io.dropwizard:dropwizard-client:$dropwizardVersion" - implementation 'com.google.guava:guava:25.1-jre' + implementation "com.google.guava:guava:$guavaVersion" // Match with grpc-netty-shaded via PubSub // removing io.grpc:grpc-netty-shaded:1.14.0 causes ALPN error implementation 'io.grpc:grpc-netty-shaded:1.14.0' From 66f6fb51607b2424327eb64127594a5a49bb4905 Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Mon, 10 Sep 2018 11:22:27 +0200 Subject: [PATCH 49/78] Suppress exception logged while connecting to real bigQuery 3 times before connecting to emulator --- prime/config/test.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/prime/config/test.yaml b/prime/config/test.yaml index 7f1850185..b67aa3372 100644 --- a/prime/config/test.yaml +++ b/prime/config/test.yaml @@ -45,3 +45,5 @@ logging: level: INFO loggers: org.ostelco: DEBUG + # suppress 
exception logged while connecting to real bigQuery 3 times before connecting to emulator + com.google.auth.oauth2.ComputeEngineCredentials: ERROR \ No newline at end of file From 44098711fdc45290b01f2ad70e5a2418a17b9fc9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Mon, 10 Sep 2018 11:41:06 +0200 Subject: [PATCH 50/78] Feature/bq metrics extractor (#258) * Initial checkin of yeoman generated gradle-based dropwizard application * Adding a readme outlining what we should do * Adding README.md * Adding autogenerated java code, after autoconverting it to kotlin * Fixing typos in README.md * Adding docs about libraries used for implementation * Adding dependencies for prometheus and bigquery * Indicate what needs to be done in a TODO list * Populate the program with example code from documentation of the libraries we're using. The idea is now to refactor these into functional code * Intermediate result, before refactoring gradle setup. * Simple gradle build, getting ready to make queries etc. * Add source of code that can run a sample command line command. * Ooops, forgot to add the code. * Adding docker-compose to start pushgw/prometheus/grafana combo * Updating todo * Make it possible to push something to graphana using the push gateway * Add warning * Pulling the google pubsub emulator into the docker compose setup * Steps towards pubsub, but also current prioritizing getting something out of bigquery to prepare deployment into cluster today or tomorrow. * Refactor into code more easily extended into getting stuff from bigquery * Refactoring a little * Comment about next step * Now able to perform queries against bigquery * Update todo list * Prepare to push to pushgw * Trying to connect dots * Adding comment * Publishing a summary instead of a gauge * Some steps ahead, not logging properly * Adding test and prod dockerfiles, also making sure logging works by reading config file parameters. 
* Refactor, make more generic and easier to debug * Update TODO with what has been done today * Add a crontab file for running the jobs in the cluster as a crontab, and some notes on how that was done from the workstation * Updating notes and readme * Update the next step, provided that this thing is actually working(ish) now * We're now reading one metric in the grafana (dev), declaring a bit of success. * Ho ho ho, now we have a fully configurable metrics extractor * Updating the README with some maintenance procedures * Updating based on review comments * Making all classes private, since they are not referred to anywhere else * Fixing things sugggested by codacy analysis * Adding more comments * Fixing stuff * whitespace etc. --- bq-metrics-extractor/.gitignore | 35 +++ bq-metrics-extractor/Dockerfile | 10 + bq-metrics-extractor/Dockerfile.test | 11 + bq-metrics-extractor/README.md | 107 +++++++ bq-metrics-extractor/build.gradle | 42 +++ bq-metrics-extractor/config/.gitignore | 1 + bq-metrics-extractor/config/config.yaml | 20 ++ bq-metrics-extractor/cronjob/config.yaml | 15 + bq-metrics-extractor/docker-compose.yml | 64 ++++ bq-metrics-extractor/prometheus.yml | 27 ++ bq-metrics-extractor/script/start.sh | 6 + .../BqMetricsExtractorApplication.kt | 286 ++++++++++++++++++ settings.gradle | 3 + 13 files changed, 627 insertions(+) create mode 100644 bq-metrics-extractor/.gitignore create mode 100644 bq-metrics-extractor/Dockerfile create mode 100644 bq-metrics-extractor/Dockerfile.test create mode 100644 bq-metrics-extractor/README.md create mode 100644 bq-metrics-extractor/build.gradle create mode 100644 bq-metrics-extractor/config/.gitignore create mode 100644 bq-metrics-extractor/config/config.yaml create mode 100644 bq-metrics-extractor/cronjob/config.yaml create mode 100644 bq-metrics-extractor/docker-compose.yml create mode 100644 bq-metrics-extractor/prometheus.yml create mode 100755 bq-metrics-extractor/script/start.sh create mode 100644 
bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt diff --git a/bq-metrics-extractor/.gitignore b/bq-metrics-extractor/.gitignore new file mode 100644 index 000000000..57be5a48e --- /dev/null +++ b/bq-metrics-extractor/.gitignore @@ -0,0 +1,35 @@ +### Eclipse ### +.checkstyle +.classpath +.metadata +.loadpath +.project +.settings/ + +### Gradle ### +/.gradle/ +/build/ + +### Intellij ### +.idea/ +*.iml +*.ipr +*.iws +out/ + +### Mac OSX + Windows ### +.DS_Store +Thumbs.db + +### Node ### +/node_modules/ +npm-debug.log + +### SublimeText + TextMate ### +*.sublime-workspace +*.sublime-project +*.tmproj +*.tmproject + +### Vim ### +*.sw[op] diff --git a/bq-metrics-extractor/Dockerfile b/bq-metrics-extractor/Dockerfile new file mode 100644 index 000000000..3d0e959a8 --- /dev/null +++ b/bq-metrics-extractor/Dockerfile @@ -0,0 +1,10 @@ +FROM openjdk:8u171 + +MAINTAINER CSI "csi@telenordigital.com" + +COPY script/start.sh /start.sh +COPY config/config.yaml /config/config.yaml + +COPY build/libs/bq-metrics-extractor-uber.jar /bq-metrics-extractor.jar + +CMD ["/start.sh"] diff --git a/bq-metrics-extractor/Dockerfile.test b/bq-metrics-extractor/Dockerfile.test new file mode 100644 index 000000000..22568bb72 --- /dev/null +++ b/bq-metrics-extractor/Dockerfile.test @@ -0,0 +1,11 @@ +FROM openjdk:8u171 + +MAINTAINER CSI "csi@telenordigital.com" + +COPY script/start.sh /start.sh +COPY config/pantel-prod.json /secret/pantel-prod.json +COPY config/config.yaml /config/config.yaml + +COPY build/libs/bq-metrics-extractor-uber.jar /bq-metrics-extractor.jar + +CMD ["/start.sh"] diff --git a/bq-metrics-extractor/README.md b/bq-metrics-extractor/README.md new file mode 100644 index 000000000..7eece4ec1 --- /dev/null +++ b/bq-metrics-extractor/README.md @@ -0,0 +1,107 @@ +BigQuery metrics extractor +======= + + +This module is a standalone, command-line launched dropwizard application +that will: + +* Talk to google 
[BigQuery](https://cloud.google.com/bigquery/) and + extract metrics using [Googles BigQuery java library](https://cloud.google.com/bigquery/docs/reference/libraries) +* Talk to [Prometheus](https://prometheus.io) + [Pushgateway](https://github.com/prometheus/pushgateway) and push + those metrics there. Prometheus servers can then scrape those + metrics at their leisure. We will use the + [Prometheus java client](https://github.com/prometheus/client_java) + to talk to the pushgateway. + +The component will be built as a docker component, and will then be periodically +run as a command line application, as a +[Kubernetes cron job](https://kubernetes.io/docs/concepts/workloads/controllers/cron-jobs/). + + +The component is packaged as an individual docker artefact (details below), +and deployed as a cronjob (also described below). + +To run the program from the command line, which is useful when debugging and +necessary to know when constructing a Docker file, do this: + + java -jar /bq-metrics-extractor.jar query --pushgateway pushgateway:8080 config/config.yaml + +the pushgateway:8080 is the hostname (dns resolvable) and portnumber of the Prometheus Push Gateway. + +The config.yaml file contains specifications of queries and how they map to metrics: + + bqmetrics: + - type: summary + name: active_users + help: Number of active users + resultColumn: count + sql: > + SELECT count(distinct user_pseudo_id) AS count FROM `pantel-2decb.analytics_160712959.events_*` + WHERE event_name = "first_open" + LIMIT 1000 + +The idea being that to add queries of a type that is already know by the extractor program, +only an addition to the bqmetrics list. +Use [standardSQL syntax (not legacy)](https://cloud.google.com/bigquery/sql-reference/) for queries. + +If not running in a google kubernetes cluster (e.g. 
in docker compose, or from the command line), +it's necessary to set the environment variable GOOGLE_APPLICATION_CREDENTIALS to point to +a credentials file that will provide access for the BigQuery library. + + + +How to build and deploy the cronjob manually +=== + +##First get credentials (upgrade gcloud for good measure): + + gcloud components update + gcloud container clusters get-credentials dev-cluster --zone europe-west1-b --project pantel-2decb + +##Build the artefact: + + gradle build + docker build . + +##Authorize tag and push to docker registry in google cloud: + + gcloud auth configure-docker + docker tag foobarbaz eu.gcr.io/pantel-2decb/bq-metrics-extractor + docker push eu.gcr.io/pantel-2decb/bq-metrics-extractor + +... where foobarbaz is the id of the container built by docker build. + +## Then start the cronjob in kubernetes + kubectl apply -f cronjob/config.yaml + kubectl describe cronjob bq-metrics-extractor + +## To talk to the prometheus in the monitoring namespace & watch the users metrics evolve + kubectl port-forward --namespace=monitoring $(kubectl get pods --namespace=monitoring | grep prometheus-core | awk '{print $1}') 9090 + watch 'curl -s localhost:9090/metrics | grep users' + + +TODO +=== + +* Rewrite the SQL so that it will pick up only today/yesterday's data, + use a template language, either premade or ad-hoc. + As of now, the sql in config is static. + We need to make it as a template. + Table name to be changed from events_* to events_${yyyyMMdd} + + Here, the suffix is yesterday’s date in yyyyMMdd format. (edited) + There are some libraries which we can use, which enable f + reemarker expressions in dropwizard config file + (this comment is @vihangpatil 's I just nicked it from + slack where he made the comment) + + +* Add more metrics. 
+* Make an acceptance test that runs a roundtrip test + in docker compose, based on something like this: curl http://localhost:9091/metrics | grep -i active +* Push the first metric to production, use Kubernetes crontab + to ensure periodic execution. +* Make it testable to send metrics to pushgateway. +* Extend to more metrics. +* Remove the TODO list and declare victory :-) \ No newline at end of file diff --git a/bq-metrics-extractor/build.gradle b/bq-metrics-extractor/build.gradle new file mode 100644 index 000000000..e8256569e --- /dev/null +++ b/bq-metrics-extractor/build.gradle @@ -0,0 +1,42 @@ +plugins { + id "org.jetbrains.kotlin.jvm" version "1.2.61" + id "application" + id "com.github.johnrengelman.shadow" version "2.0.4" + id "idea" +} + + +dependencies { + + implementation "io.dropwizard:dropwizard-core:$dropwizardVersion" + implementation "io.dropwizard:dropwizard-client:$dropwizardVersion" + + testImplementation "io.dropwizard:dropwizard-client:$dropwizardVersion" + testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" + testImplementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" + testImplementation "org.mockito:mockito-core:2.18.3" + testImplementation 'org.assertj:assertj-core:3.10.0' + + implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion" + // Bigquery dependency + compile 'com.google.cloud:google-cloud-bigquery:1.40.0' +
classifier = "uber" + version = null +} + +apply from: '../jacoco.gradle' \ No newline at end of file diff --git a/bq-metrics-extractor/config/.gitignore b/bq-metrics-extractor/config/.gitignore new file mode 100644 index 000000000..bf045303f --- /dev/null +++ b/bq-metrics-extractor/config/.gitignore @@ -0,0 +1 @@ +pantel-prod.json \ No newline at end of file diff --git a/bq-metrics-extractor/config/config.yaml b/bq-metrics-extractor/config/config.yaml new file mode 100644 index 000000000..2201a8417 --- /dev/null +++ b/bq-metrics-extractor/config/config.yaml @@ -0,0 +1,20 @@ +logging: + level: INFO + loggers: + org.ostelco: DEBUG + appenders: + - type: console + layout: + type: json + customFieldNames: + level: severity + +bqmetrics: + - type: summary + name: active_users + help: Number of active users + resultColumn: count + sql: > + SELECT count(distinct user_pseudo_id) AS count FROM `pantel-2decb.analytics_160712959.events_*` + WHERE event_name = "first_open" + LIMIT 1000 \ No newline at end of file diff --git a/bq-metrics-extractor/cronjob/config.yaml b/bq-metrics-extractor/cronjob/config.yaml new file mode 100644 index 000000000..65bc91486 --- /dev/null +++ b/bq-metrics-extractor/cronjob/config.yaml @@ -0,0 +1,15 @@ +apiVersion: batch/v1beta1 +kind: CronJob +metadata: + name: bq-metrics-extractor +spec: + schedule: "*/30 * * * *" + jobTemplate: + spec: + template: + spec: + containers: + - name: bq-metrics-extractor + image: eu.gcr.io/pantel-2decb/bq-metrics-extractor:latest + imagePullPolicy: Always + restartPolicy: Never diff --git a/bq-metrics-extractor/docker-compose.yml b/bq-metrics-extractor/docker-compose.yml new file mode 100644 index 000000000..33c79698a --- /dev/null +++ b/bq-metrics-extractor/docker-compose.yml @@ -0,0 +1,64 @@ +version: '3.3' +services: + # application: + # image: ... + # environment: + # - PUBSUB_EMULATOR_HOST="emulator:8085" + # # ...other configurations... 
+ # depends_on: + # - emulator + # - push-gateway + + + metrics-extrator: + container_name: metrics-extrator + build: + context: . + dockerfile: Dockerfile.test + depends_on: + - pushgateway + environment: + - GOOGLE_APPLICATION_CREDENTIALS=/secret/pantel-prod.json + + emulator: + container_name: emulator + image: adilsoncarvalho/gcloud-pubsub-emulator + ports: + - "8085:8085" + + prometheus: + container_name: prometheus + image: prom/prometheus + volumes: + - './prometheus.yml:/etc/prometheus/prometheus.yml' + - 'prometheus_data:/prometheus' + ports: + - '9090:9090' + + # Pushgateway exposes external port 8080, since that is the port + # that is exposed by the pushgateway in the kubernetes clusters + pushgateway: + container_name: pushgateway + image: prom/pushgateway + ports: + - '8080:9091' + + grafana: + container_name: grafana + image: grafana/grafana + environment: + # Please note that setting the password only works the _FIRST_TIME_ + # the image is built. After that, it's cached and won't change + # if you change it in this docker-compose.yml file. You have + # been warned! + - GF_SECURITY_ADMIN_PASSWORD=pass + depends_on: + - prometheus + ports: + - '3000:3000' + volumes: + - 'grafana_data:/var/lib/grafana' + +volumes: + prometheus_data: {} + grafana_data: {} diff --git a/bq-metrics-extractor/prometheus.yml b/bq-metrics-extractor/prometheus.yml new file mode 100644 index 000000000..836bbfba3 --- /dev/null +++ b/bq-metrics-extractor/prometheus.yml @@ -0,0 +1,27 @@ +global: + scrape_interval: 15s # By default, scrape targets every 15 seconds. + + # Attach these labels to any time series or alerts when communicating with + # external systems (federation, remote storage, Alertmanager). + external_labels: + monitor: 'codelab-monitor' + +# A scrape configuration containing exactly one endpoint to scrape: +# Here it's Prometheus itself. +scrape_configs: + # The job name is added as a label `job=` to any timeseries scraped from this config. 
+ - job_name: 'prometheus' + + # Override the global default and scrape targets from this job every 5 seconds. + scrape_interval: 5s + + static_configs: + - targets: ['prometheus:9090'] + + - job_name: 'push-gateway' + + scrape_interval: 5s + honor_labels: true + + static_configs: + - targets: ['push-gateway:9091'] \ No newline at end of file diff --git a/bq-metrics-extractor/script/start.sh b/bq-metrics-extractor/script/start.sh new file mode 100755 index 000000000..250e6c3b3 --- /dev/null +++ b/bq-metrics-extractor/script/start.sh @@ -0,0 +1,6 @@ +#!/bin/bash -x + +# Start app +exec java \ + -Dfile.encoding=UTF-8 \ + -jar /bq-metrics-extractor.jar query --pushgateway pushgateway:8080 config/config.yaml diff --git a/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt b/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt new file mode 100644 index 000000000..11454bc09 --- /dev/null +++ b/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt @@ -0,0 +1,286 @@ +package org.ostelco.bqmetrics + + +import com.fasterxml.jackson.annotation.JsonProperty +import com.google.cloud.bigquery.* +import io.dropwizard.Application +import io.dropwizard.setup.Bootstrap +import io.dropwizard.setup.Environment +import io.prometheus.client.exporter.PushGateway +import io.prometheus.client.CollectorRegistry +import io.dropwizard.Configuration +import io.dropwizard.cli.ConfiguredCommand +import io.prometheus.client.Summary +import net.sourceforge.argparse4j.inf.Namespace +import net.sourceforge.argparse4j.inf.Subparser +import org.slf4j.LoggerFactory +import java.util.* +import org.slf4j.Logger +import javax.validation.Valid +import javax.validation.constraints.NotNull + +/** + * Bridge between "latent metrics" stored in BigQuery and Prometheus + * metrics available for instrumentation ana alerting services. 
+ * + * Common usecase: + * + * java -jar /bq-metrics-extractor.jar query --pushgateway pushgateway:8080 config/config.yaml + * + * the pushgateway:8080 is the hostname (dns resolvable) and portnumber of the + * Prometheus Push Gateway. + * + * The config.yaml file contains specifications of queries and how they map + * to metrics: + * + * bqmetrics: + * - type: summary + * name: active_users + * help: Number of active users + * resultColumn: count + * sql: > + * SELECT count(distinct user_pseudo_id) AS count FROM `pantel-2decb.analytics_160712959.events_*` + * WHERE event_name = "first_open" + * LIMIT 1000 + * + * Use standard SQL syntax (not legacy) for queries. + * See: https://cloud.google.com/bigquery/sql-reference/ + * + * If not running in a google kubernetes cluster (e.g. in docker compose, or from the command line), + * it's necessary to set the environment variable GOOGLE_APPLICATION_CREDENTIALS to point to + * a credentials file that will provide access for the BigQuery library. + * + */ + + +/** + * Main entry point, invoke dropwizard application. + */ +fun main(args: Array) { + BqMetricsExtractorApplication().run(*args) +} + +/** + * Config of a single metric that will be extracted using a BigQuery + * query. + */ +private class MetricConfig { + + /** + * Type of the metric. Currently the only permitted type is + * "summary", the intent is to extend this as more types + * of metrics (counters, gauges, ...) are added. + */ + @Valid + @NotNull + @JsonProperty + lateinit var type: String + + /** + * The name of the metric, as it will be seen by Prometheus. + */ + @Valid + @NotNull + @JsonProperty + lateinit var name: String + + /** + * A help string, used to describe the metric. + */ + @Valid + @NotNull + @JsonProperty + lateinit var help: String + + /** + * When running the query, the result should be placed in a named + * column, and this field contains the name of that column. 
+ */ + @Valid + @NotNull + @JsonProperty + lateinit var resultColumn: String + + /** + * The SQL used to extract the value of the metric from BigQuery. + */ + @Valid + @NotNull + @JsonProperty + lateinit var sql: String +} + + +/** + * Configuration for the extractor, default config + * plus a list of metrics descriptions. + */ +private class BqMetricsExtractorConfig: Configuration() { + @Valid + @NotNull + @JsonProperty("bqmetrics") + lateinit var metrics: List +} + + +/** + * Main entry point to the bq-metrics-extractor API server. + */ +private class BqMetricsExtractorApplication : Application() { + + override fun initialize(bootstrap: Bootstrap) { + bootstrap.addCommand(CollectAndPushMetrics()) + } + + override fun run( + configuration: BqMetricsExtractorConfig, + environment: Environment) { + } +} + + +private interface MetricBuilder { + fun buildMetric(registry: CollectorRegistry) +} + + +private class SummaryMetricBuilder( + val metricName: String, + val help: String, + val sql: String, + val resultColumn: String) : MetricBuilder { + + private val log: Logger = LoggerFactory.getLogger(SummaryMetricBuilder::class.java) + + fun getSummaryViaSql(): Long { + // Instantiate a client. If you don't specify credentials when constructing a client, the + // client library will look for credentials in the environment, such as the + // GOOGLE_APPLICATION_CREDENTIALS environment variable. + val bigquery = BigQueryOptions.getDefaultInstance().service + val queryConfig: QueryJobConfiguration = + QueryJobConfiguration.newBuilder( + sql.trimIndent()) + .setUseLegacySql(false) + .build(); + + // Create a job ID so that we can safely retry. + val jobId: JobId = JobId.of(UUID.randomUUID().toString()); + var queryJob: Job = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build()); + + // Wait for the query to complete. 
+ queryJob = queryJob.waitFor(); + + // Check for errors + if (queryJob == null) { + throw BqMetricsExtractionException("Job no longer exists"); + } else if (queryJob.getStatus().getError() != null) { + // You can also look at queryJob.getStatus().getExecutionErrors() for all + // errors, not just the latest one. + throw BqMetricsExtractionException(queryJob.getStatus().getError().toString()); + } + val result = queryJob.getQueryResults() + if (result.totalRows != 1L) { + throw BqMetricsExtractionException("Number of results was ${result.totalRows} which is different from the expected single row") + } + + val count = result.iterateAll().iterator().next().get(resultColumn).longValue + + return count + } + + + override fun buildMetric(registry: CollectorRegistry) { + val activeUsersSummary: Summary = Summary.build() + .name(metricName) + .help(help).register(registry) + val value: Long = getSummaryViaSql() + + log.info("Summarizing metric $metricName to be $value") + + activeUsersSummary.observe(value * 1.0) + } +} + +/** + * Thrown when something really bad is detected and it's necessary to terminate + * execution immediately. No cleanup of anything will be done. + */ +private class BqMetricsExtractionException: RuntimeException { + constructor(message: String, ex: Exception?): super(message, ex) + constructor(message: String): super(message) + constructor(ex: Exception): super(ex) +} + + +/** + * Adapter class that will push metrics to the Prometheus push gateway. 
+ */ +private class PrometheusPusher(val pushGateway: String, val job: String) { + + private val log: Logger = LoggerFactory.getLogger(PrometheusPusher::class.java) + + val registry = CollectorRegistry() + + @Throws(Exception::class) + fun publishMetrics(metrics: List) { + + val metricSources: MutableList = mutableListOf() + metrics.forEach { + val typeString: String = it.type.trim().toUpperCase() + when (typeString) { + "SUMMARY" -> { + metricSources.add(SummaryMetricBuilder( + it.name, + it.help, + it.sql, + it.resultColumn)) + } + else -> { + log.error("Unknown metrics type '${it.type}'") + } + } + } + + log.info("Querying bigquery for metric values") + val pg = PushGateway(pushGateway) + metricSources.forEach({ it.buildMetric(registry) }) + + log.info("Pushing metrics to pushgateway") + pg.pushAdd(registry, job) + log.info("Done transmitting metrics to pushgateway") + } +} + +private class CollectAndPushMetrics : ConfiguredCommand( + "query", + "query BigQuery for a metric") { + override fun run(bootstrap: Bootstrap?, namespace: Namespace?, configuration: BqMetricsExtractorConfig?) { + + if (configuration == null) { + throw BqMetricsExtractionException("Configuration is null") + } + + + if (namespace == null) { + throw BqMetricsExtractionException("Namespace from config is null") + } + + val pgw = namespace.get(pushgatewayKey) + PrometheusPusher(pgw, + "bq_metrics_extractor").publishMetrics(configuration.metrics) + } + + val pushgatewayKey = "pushgateway" + + override fun configure(subparser: Subparser?) 
{ + super.configure(subparser) + if (subparser == null) { + throw BqMetricsExtractionException("subparser is null") + } + subparser.addArgument("-p", "--pushgateway") + .dest(pushgatewayKey) + .type(String::class.java) + .required(true) + .help("The pushgateway to report metrics to, format is hostname:portnumber") + } +} diff --git a/settings.gradle b/settings.gradle index cc44e9b47..a7ee39d4c 100644 --- a/settings.gradle +++ b/settings.gradle @@ -6,6 +6,7 @@ include ':admin-api' include ':analytics-grpc-api' include ':analytics-module' include ':auth-server' +include ':bq-metrics-extractor' include ':client-api' include ':dataflow-pipelines' include ':diameter-stack' @@ -25,12 +26,14 @@ include ':prime-api' include ':prime-client-api' include ':pseudonym-server' + project(':acceptance-tests').projectDir = "$rootDir/acceptance-tests" as File project(':app-notifier').projectDir = "$rootDir/app-notifier" as File project(':admin-api').projectDir = "$rootDir/admin-api" as File project(':analytics-grpc-api').projectDir = "$rootDir/analytics-grpc-api" as File project(':analytics-module').projectDir = "$rootDir/analytics-module" as File project(':auth-server').projectDir = "$rootDir/auth-server" as File +project(':bq-metrics-extractor').projectDir = "$rootDir/bq-metrics-extractor" as File project(':client-api').projectDir = "$rootDir/client-api" as File project(':dataflow-pipelines').projectDir = "$rootDir/dataflow-pipelines" as File project(':diameter-stack').projectDir = "$rootDir/diameter-stack" as File From bd38740382db85310ecfe75517ac8998a7aa2790 Mon Sep 17 00:00:00 2001 From: "Kjell M. 
Myksvoll" Date: Fri, 7 Sep 2018 13:43:29 +0200 Subject: [PATCH 51/78] Adds test for 'set default source' to payment acceptance tests --- .../org/ostelco/at/common/StripePayment.kt | 19 +++++++++ .../kotlin/org/ostelco/at/okhttp/Tests.kt | 41 ++++++++++++++++++- 2 files changed, 59 insertions(+), 1 deletion(-) diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt index 57ca065dc..c1259eba2 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt @@ -43,6 +43,25 @@ object StripePayment { return token.card.id } + fun getDefaultSourceForCustomer(customerId: String) : String { + + // https://stripe.com/docs/api/java#create_source + Stripe.apiKey = System.getenv("STRIPE_API_KEY") + + val customer = Customer.retrieve(customerId) + return customer.defaultSource + } + + fun getCustomerIdForEmail(email: String) : String { + + // https://stripe.com/docs/api/java#create_card_token + Stripe.apiKey = System.getenv("STRIPE_API_KEY") + + val customers = Customer.list(emptyMap()).data + + return customers.filter { it.email.equals(email) }.first().id + } + fun deleteAllCustomers() { // https://stripe.com/docs/api/java#create_card_token Stripe.apiKey = System.getenv("STRIPE_API_KEY") diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt index f6de6ba50..4797f6a04 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt @@ -212,6 +212,7 @@ class SourceTest { val client = clientForSubject(subject = email) val sourceId = StripePayment.createPaymentTokenId() + val cardId = StripePayment.getCardIdForTokenId(sourceId) // Ties source with user profile both local and with Stripe client.createSource(sourceId) 
@@ -219,10 +220,48 @@ class SourceTest { Thread.sleep(200) val sources = client.listSources() + assert(sources.isNotEmpty()) { "Expected at least one payment source for profile $email" } + assertNotNull(sources.first { it.id == cardId }, + "Expected card $cardId in list of payment sources for profile $email") + } + + @Test + fun `okhttp test - PUT source set default`() { + + StripePayment.deleteAllCustomers() + Firebase.deleteAllPaymentCustomers() + + val email = "purchase-${randomInt()}@test.com" + createProfile(name = "Test Payment Source", email = email) + + val client = clientForSubject(subject = email) + val sourceId = StripePayment.createPaymentTokenId() val cardId = StripePayment.getCardIdForTokenId(sourceId) - assertNotNull(sources.first { it.id == cardId }, "Expected card $cardId in list of payment sources for profile $email") + + // Ties source with user profile both local and with Stripe + client.createSource(sourceId) + + Thread.sleep(200) + + val newSourceId = StripePayment.createPaymentTokenId() + val newCardId = StripePayment.getCardIdForTokenId(newSourceId) + + client.createSource(newSourceId) + + // TODO: Update to fetch the Stripe customerId from 'admin' API when ready. + val customerId = StripePayment.getCustomerIdForEmail(email) + + // Verify that original 'sourceId/card' is default. + assertEquals(cardId, StripePayment.getDefaultSourceForCustomer(customerId), + "Expected $cardId to be default source for $customerId") + + // Set new default card. + client.setDefaultSource(newCardId) + + assertEquals(newCardId, StripePayment.getDefaultSourceForCustomer(customerId), + "Expected $newCardId to be default source for $customerId") } } From acee3f65873398f3f31eee7e08bba9eb1acdf8e0 Mon Sep 17 00:00:00 2001 From: "Kjell M. 
Myksvoll" Date: Fri, 7 Sep 2018 14:26:11 +0200 Subject: [PATCH 52/78] Adds 'jersey' part of 'set default source' payment acceptance test --- .../kotlin/org/ostelco/at/jersey/Tests.kt | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt index a38428e07..5aab34c4b 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt @@ -243,6 +243,7 @@ class SourceTest { post { path = "/paymentSources" subscriberId = email + queryParams = mapOf("sourceId" to sourceId) } Thread.sleep(200) @@ -256,6 +257,54 @@ class SourceTest { val cardId = StripePayment.getCardIdForTokenId(sourceId) assertNotNull(sources.first { it.id == cardId }, "Expected card $cardId in list of payment sources for profile $email") } + + @Test + fun `jersey test - PUT source set default`() { + + StripePayment.deleteAllCustomers() + Firebase.deleteAllPaymentCustomers() + + val email = "purchase-${randomInt()}@test.com" + createProfile(name = "Test Payment Source", email = email) + + val sourceId = StripePayment.createPaymentTokenId() + val cardId = StripePayment.getCardIdForTokenId(sourceId) + + // Ties source with user profile both local and with Stripe + post { + path = "/paymentSources" + subscriberId = email + queryParams = mapOf("sourceId" to sourceId) + } + + Thread.sleep(200) + + val newSourceId = StripePayment.createPaymentTokenId() + val newCardId = StripePayment.getCardIdForTokenId(newSourceId) + + post { + path = "/paymentSources" + subscriberId = email + queryParams = mapOf("sourceId" to newSourceId) + } + + // TODO: Update to fetch the Stripe customerId from 'admin' API when ready. + val customerId = StripePayment.getCustomerIdForEmail(email) + + // Verify that original 'sourceId/card' is default. 
+ assertEquals(cardId, StripePayment.getDefaultSourceForCustomer(customerId), + "Expected $cardId to be default source for $customerId") + + // Set new default card. + put { + path = "/paymentSources" + subscriberId = email + queryParams = mapOf("sourceId" to newCardId) + } + + assertEquals(newCardId, StripePayment.getDefaultSourceForCustomer(customerId), + "Expected $newCardId to be default source for $customerId") + } } class PurchaseTest { From cde307e919cbeef0c503e36c4f8230dff5f100fa Mon Sep 17 00:00:00 2001 From: "Kjell M. Myksvoll" Date: Mon, 10 Sep 2018 09:52:16 +0200 Subject: [PATCH 53/78] Ensure that the jersey version of the source tests is included in the acceptance tests --- acceptance-tests/script/wait.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/acceptance-tests/script/wait.sh b/acceptance-tests/script/wait.sh index 64400794a..1d4dcbae6 100755 --- a/acceptance-tests/script/wait.sh +++ b/acceptance-tests/script/wait.sh @@ -29,6 +29,7 @@ java -cp '/acceptance-tests.jar' org.junit.runner.JUnitCore \ org.ostelco.at.jersey.GetPseudonymsTest \ org.ostelco.at.jersey.GetProductsTest \ org.ostelco.at.jersey.GetSubscriptionStatusTest \ + org.ostelco.at.jersey.SourceTest \ org.ostelco.at.jersey.PurchaseTest \ org.ostelco.at.jersey.AnalyticsTest \ org.ostelco.at.jersey.ConsentTest \ From 8daee1832827a7bf226068ee1cb9c34c49e1278d Mon Sep 17 00:00:00 2001 From: "Kjell M. 
Myksvoll" Date: Mon, 10 Sep 2018 09:53:46 +0200 Subject: [PATCH 54/78] Fixes incorrect HTTP status codes in the '/paymentSources' client API --- .../org/ostelco/prime/client/api/resources/PaymentResource.kt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PaymentResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PaymentResource.kt index 657ed4710..52f1b0ac0 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PaymentResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PaymentResource.kt @@ -52,7 +52,7 @@ class PaymentResource(private val dao: SubscriberDAO) { return dao.listSources(token.name) .fold( { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, - { sourceList -> Response.status(Response.Status.CREATED).entity(sourceList)} + { sourceList -> Response.status(Response.Status.OK).entity(sourceList)} ).build() } @@ -69,7 +69,7 @@ class PaymentResource(private val dao: SubscriberDAO) { return dao.setDefaultSource(token.name, sourceId) .fold( { apiError -> Response.status(apiError.status).entity(asJson(apiError.description)) }, - { sourceInfo -> Response.status(Response.Status.CREATED).entity(sourceInfo)} + { sourceInfo -> Response.status(Response.Status.OK).entity(sourceInfo)} ).build() } } From c1a6136a45bd857e16a30a6863a6db1217fa664f Mon Sep 17 00:00:00 2001 From: "Kjell M. 
Myksvoll" Date: Mon, 10 Sep 2018 09:55:41 +0200 Subject: [PATCH 55/78] Adds test of 'return list of sources' to 'sources' acceptance test --- .../kotlin/org/ostelco/at/jersey/Tests.kt | 40 +++++++++++++++++++ .../kotlin/org/ostelco/at/okhttp/Tests.kt | 31 ++++++++++++++ 2 files changed, 71 insertions(+) diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt index 5aab34c4b..fa5695237 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt @@ -258,6 +258,46 @@ class SourceTest { assertNotNull(sources.first { it.id == cardId }, "Expected card $cardId in list of payment sources for profile $email") } + @Test + fun `okhttp test - GET list sources`() { + + StripePayment.deleteAllCustomers() + Firebase.deleteAllPaymentCustomers() + + val email = "purchase-${randomInt()}@test.com" + createProfile(name = "Test Payment Source", email = email) + + val sourceId = StripePayment.createPaymentTokenId() + val cardId = StripePayment.getCardIdForTokenId(sourceId) + + // Ties source with user profile both local and with Stripe + post { + path = "/paymentSources" + subscriberId = email + queryParams = mapOf("sourceId" to sourceId) + } + + Thread.sleep(200) + + val newSourceId = StripePayment.createPaymentTokenId() + val newCardId = StripePayment.getCardIdForTokenId(newSourceId) + + post { + path = "/paymentSources" + subscriberId = email + queryParams = mapOf("sourceId" to newSourceId) + } + + val sources : PaymentSourceList = get { + path = "/paymentSources" + subscriberId = email + } + + assert(sources.isNotEmpty()) { "Expected at least one payment source for profile $email" } + assert(sources.map{ it.id }.containsAll(listOf(cardId, newCardId))) + { "Expected to find both $cardId and $newCardId in list of sources for profile $email" } + } + @Test fun `jersey test - PUT source set default`() { diff --git 
a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt index 4797f6a04..4d5da6387 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt @@ -226,6 +226,37 @@ class SourceTest { "Expected card $cardId in list of payment sources for profile $email") } + @Test + fun `okhttp test - GET list sources`() { + + StripePayment.deleteAllCustomers() + Firebase.deleteAllPaymentCustomers() + + val email = "purchase-${randomInt()}@test.com" + createProfile(name = "Test Payment Source", email = email) + + val client = clientForSubject(subject = email) + + val sourceId = StripePayment.createPaymentTokenId() + val cardId = StripePayment.getCardIdForTokenId(sourceId) + + // Ties source with user profile both local and with Stripe + client.createSource(sourceId) + + Thread.sleep(200) + + val newSourceId = StripePayment.createPaymentTokenId() + val newCardId = StripePayment.getCardIdForTokenId(newSourceId) + + client.createSource(newSourceId) + + val sources = client.listSources() + + assert(sources.isNotEmpty()) { "Expected at least one payment source for profile $email" } + assert(sources.map{ it.id }.containsAll(listOf(cardId, newCardId))) + { "Expected to find both $cardId and $newCardId in list of sources for profile $email" } + } + @Test fun `okhttp test - PUT source set default`() { From 8e3bdad3c2ea39a755d4c7463ed7404c64b07f9f Mon Sep 17 00:00:00 2001 From: "Kjell M. 
Myksvoll" Date: Mon, 10 Sep 2018 15:08:24 +0200 Subject: [PATCH 56/78] Adds 'active sims last 24h' metrics to BQ extractor --- bq-metrics-extractor/config/config.yaml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/bq-metrics-extractor/config/config.yaml b/bq-metrics-extractor/config/config.yaml index 2201a8417..2c8d82d13 100644 --- a/bq-metrics-extractor/config/config.yaml +++ b/bq-metrics-extractor/config/config.yaml @@ -17,4 +17,11 @@ bqmetrics: sql: > SELECT count(distinct user_pseudo_id) AS count FROM `pantel-2decb.analytics_160712959.events_*` WHERE event_name = "first_open" - LIMIT 1000 \ No newline at end of file + LIMIT 1000 + - type: summary + name: sims_who_have_used_data + help: Number of SIMs that har used data last 24 hours + resultColumn: count + sql: > + SELECT count(DISTINCT msisdn) FROM `pantel-2decb.data_consumption.raw_consumption` + WHERE timestamp >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 DAY) From 4645f2de141d16fc1589fb71ef4d8762bd850383 Mon Sep 17 00:00:00 2001 From: "Kjell M. Myksvoll" Date: Mon, 10 Sep 2018 16:13:03 +0200 Subject: [PATCH 57/78] Updates of variable naming and comments to 'sources' acceptance test --- .../org/ostelco/at/common/StripePayment.kt | 8 +++++ .../kotlin/org/ostelco/at/jersey/Tests.kt | 30 +++++++++---------- .../kotlin/org/ostelco/at/okhttp/Tests.kt | 30 +++++++++---------- 3 files changed, 38 insertions(+), 30 deletions(-) diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt index c1259eba2..55351aa4d 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/common/StripePayment.kt @@ -43,6 +43,11 @@ object StripePayment { return token.card.id } + /** + * Obtains 'default source' directly from Stripe. Use in tests to + * verify that the correspondng 'setDefaultSource' API works as + * intended. 
+ */ fun getDefaultSourceForCustomer(customerId: String) : String { // https://stripe.com/docs/api/java#create_source @@ -52,6 +57,9 @@ object StripePayment { return customer.defaultSource } + /** + * Obtains the Stripe 'customerId' directly from Stripe. + */ fun getCustomerIdForEmail(email: String) : String { // https://stripe.com/docs/api/java#create_card_token diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt index fa5695237..3d0f96c4a 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt @@ -237,13 +237,13 @@ class SourceTest { val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Payment Source", email = email) - val sourceId = StripePayment.createPaymentTokenId() + val tokenId = StripePayment.createPaymentTokenId() // Ties source with user profile both local and with Stripe post { path = "/paymentSources" subscriberId = email - queryParams = mapOf("sourceId" to sourceId) + queryParams = mapOf("sourceId" to tokenId) } Thread.sleep(200) @@ -254,7 +254,7 @@ class SourceTest { } assert(sources.isNotEmpty()) { "Expected at least one payment source for profile $email" } - val cardId = StripePayment.getCardIdForTokenId(sourceId) + val cardId = StripePayment.getCardIdForTokenId(tokenId) assertNotNull(sources.first { it.id == cardId }, "Expected card $cardId in list of payment sources for profile $email") } @@ -267,25 +267,25 @@ class SourceTest { val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Payment Source", email = email) - val sourceId = StripePayment.createPaymentTokenId() - val cardId = StripePayment.getCardIdForTokenId(sourceId) + val tokenId = StripePayment.createPaymentTokenId() + val cardId = StripePayment.getCardIdForTokenId(tokenId) // Ties source with user profile both local and with Stripe post { path = "/paymentSources" 
subscriberId = email - queryParams = mapOf("sourceId" to sourceId) + queryParams = mapOf("sourceId" to tokenId) } Thread.sleep(200) - val newSourceId = StripePayment.createPaymentTokenId() - val newCardId = StripePayment.getCardIdForTokenId(newSourceId) + val newTokenId = StripePayment.createPaymentTokenId() + val newCardId = StripePayment.getCardIdForTokenId(newTokenId) post { path = "/paymentSources" subscriberId = email - queryParams = mapOf("sourceId" to newSourceId) + queryParams = mapOf("sourceId" to newTokenId) } val sources : PaymentSourceList = get { @@ -307,25 +307,25 @@ class SourceTest { val email = "purchase-${randomInt()}@test.com" createProfile(name = "Test Payment Source", email = email) - val sourceId = StripePayment.createPaymentTokenId() - val cardId = StripePayment.getCardIdForTokenId(sourceId) + val tokenId = StripePayment.createPaymentTokenId() + val cardId = StripePayment.getCardIdForTokenId(tokenId) // Ties source with user profile both local and with Stripe post { path = "/paymentSources" subscriberId = email - queryParams = mapOf("sourceId" to sourceId) + queryParams = mapOf("sourceId" to tokenId) } Thread.sleep(200) - val newSourceId = StripePayment.createPaymentTokenId() - val newCardId = StripePayment.getCardIdForTokenId(newSourceId) + val newTokenId = StripePayment.createPaymentTokenId() + val newCardId = StripePayment.getCardIdForTokenId(newTokenId) post { path = "/paymentSources" subscriberId = email - queryParams = mapOf("sourceId" to newSourceId) + queryParams = mapOf("sourceId" to newTokenId) } // TODO: Update to fetch the Stripe customerId from 'admin' API when ready. 
diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt index 4d5da6387..dcf3b45b6 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt @@ -211,11 +211,11 @@ class SourceTest { val client = clientForSubject(subject = email) - val sourceId = StripePayment.createPaymentTokenId() - val cardId = StripePayment.getCardIdForTokenId(sourceId) + val tokenId = StripePayment.createPaymentTokenId() + val cardId = StripePayment.getCardIdForTokenId(tokenId) // Ties source with user profile both local and with Stripe - client.createSource(sourceId) + client.createSource(tokenId) Thread.sleep(200) @@ -237,18 +237,18 @@ class SourceTest { val client = clientForSubject(subject = email) - val sourceId = StripePayment.createPaymentTokenId() - val cardId = StripePayment.getCardIdForTokenId(sourceId) + val tokenId = StripePayment.createPaymentTokenId() + val cardId = StripePayment.getCardIdForTokenId(tokenId) // Ties source with user profile both local and with Stripe - client.createSource(sourceId) + client.createSource(tokenId) Thread.sleep(200) - val newSourceId = StripePayment.createPaymentTokenId() - val newCardId = StripePayment.getCardIdForTokenId(newSourceId) + val newTokenId = StripePayment.createPaymentTokenId() + val newCardId = StripePayment.getCardIdForTokenId(newTokenId) - client.createSource(newSourceId) + client.createSource(newTokenId) val sources = client.listSources() @@ -268,18 +268,18 @@ class SourceTest { val client = clientForSubject(subject = email) - val sourceId = StripePayment.createPaymentTokenId() - val cardId = StripePayment.getCardIdForTokenId(sourceId) + val tokenId = StripePayment.createPaymentTokenId() + val cardId = StripePayment.getCardIdForTokenId(tokenId) // Ties source with user profile both local and with Stripe - client.createSource(sourceId) + 
client.createSource(tokenId) Thread.sleep(200) - val newSourceId = StripePayment.createPaymentTokenId() - val newCardId = StripePayment.getCardIdForTokenId(newSourceId) + val newTokenId = StripePayment.createPaymentTokenId() + val newCardId = StripePayment.getCardIdForTokenId(newTokenId) - client.createSource(newSourceId) + client.createSource(newTokenId) // TODO: Update to fetch the Stripe customerId from 'admin' API when ready. val customerId = StripePayment.getCustomerIdForEmail(email) From 28581983f6685345fedf38a2b4048698aa55d98a Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Mon, 10 Sep 2018 15:45:01 +0200 Subject: [PATCH 58/78] Directly export from neo4j to cypher file --- docker-compose.override.yaml | 2 +- neo4j-store/src/test/resources/docker-compose.yaml | 2 +- prime/infra/dev/neo4j.yaml | 2 +- prime/infra/dev/prime.yaml | 1 + prime/infra/prod/neo4j.yaml | 2 +- prime/infra/prod/prime.yaml | 1 + .../integration-tests/resources/docker-compose.yaml | 2 +- tools/neo4j-admin-tools/docker-compose.backup.yaml | 2 +- tools/neo4j-admin-tools/docker-compose.neo4j.yaml | 2 +- tools/neo4j-admin-tools/docker-compose.restore.yaml | 2 +- tools/neo4j-admin-tools/docker-compose.yaml | 12 ++++++++++++ .../kotlin/org/ostelco/tools/migration/MainKt.kt | 3 ++- .../org/ostelco/tools/migration/Neo4jExporter.kt | 4 +++- .../neo4j-admin-tools/src/main/resources/.gitignore | 4 +++- .../src/main/resources/docker-compose.yaml | 2 +- 15 files changed, 31 insertions(+), 12 deletions(-) create mode 100644 tools/neo4j-admin-tools/docker-compose.yaml diff --git a/docker-compose.override.yaml b/docker-compose.override.yaml index 373413ffe..16b2785f0 100644 --- a/docker-compose.override.yaml +++ b/docker-compose.override.yaml @@ -100,7 +100,7 @@ services: neo4j: container_name: "neo4j" - image: neo4j:3.4.4 + image: neo4j:3.4.7 environment: - NEO4J_AUTH=none ports: diff --git a/neo4j-store/src/test/resources/docker-compose.yaml b/neo4j-store/src/test/resources/docker-compose.yaml index 
28aff66dc..633d77427 100644 --- a/neo4j-store/src/test/resources/docker-compose.yaml +++ b/neo4j-store/src/test/resources/docker-compose.yaml @@ -3,7 +3,7 @@ version: "3.3" services: neo4j: container_name: "neo4j" - image: neo4j:3.4.4 + image: neo4j:3.4.7 environment: - NEO4J_AUTH=none ports: diff --git a/prime/infra/dev/neo4j.yaml b/prime/infra/dev/neo4j.yaml index e6b720b77..0ec4cc2f8 100644 --- a/prime/infra/dev/neo4j.yaml +++ b/prime/infra/dev/neo4j.yaml @@ -41,7 +41,7 @@ spec: spec: containers: - name: neo4j - image: "neo4j:3.3.5-enterprise" + image: "neo4j:3.4.7-enterprise" imagePullPolicy: "IfNotPresent" env: - name: NEO4J_dbms_mode diff --git a/prime/infra/dev/prime.yaml b/prime/infra/dev/prime.yaml index e78ea3e16..f2ad741c5 100644 --- a/prime/infra/dev/prime.yaml +++ b/prime/infra/dev/prime.yaml @@ -127,6 +127,7 @@ spec: readOnly: true - name: prime image: gcr.io/pantel-2decb/prime:PRIME_VERSION + imagePullPolicy: Always env: - name: FIREBASE_ROOT_PATH value: dev diff --git a/prime/infra/prod/neo4j.yaml b/prime/infra/prod/neo4j.yaml index 2fc8eea4a..0ec4cc2f8 100644 --- a/prime/infra/prod/neo4j.yaml +++ b/prime/infra/prod/neo4j.yaml @@ -41,7 +41,7 @@ spec: spec: containers: - name: neo4j - image: "neo4j:3.3.4-enterprise" + image: "neo4j:3.4.7-enterprise" imagePullPolicy: "IfNotPresent" env: - name: NEO4J_dbms_mode diff --git a/prime/infra/prod/prime.yaml b/prime/infra/prod/prime.yaml index de74f8fc2..725015ddc 100644 --- a/prime/infra/prod/prime.yaml +++ b/prime/infra/prod/prime.yaml @@ -91,6 +91,7 @@ spec: readOnly: true - name: prime image: gcr.io/pantel-2decb/prime:PRIME_VERSION + imagePullPolicy: Always env: - name: FIREBASE_ROOT_PATH value: v2 diff --git a/prime/src/integration-tests/resources/docker-compose.yaml b/prime/src/integration-tests/resources/docker-compose.yaml index 28aff66dc..633d77427 100644 --- a/prime/src/integration-tests/resources/docker-compose.yaml +++ b/prime/src/integration-tests/resources/docker-compose.yaml @@ -3,7 +3,7 @@ 
version: "3.3" services: neo4j: container_name: "neo4j" - image: neo4j:3.4.4 + image: neo4j:3.4.7 environment: - NEO4J_AUTH=none ports: diff --git a/tools/neo4j-admin-tools/docker-compose.backup.yaml b/tools/neo4j-admin-tools/docker-compose.backup.yaml index f659f058b..2c128b62e 100644 --- a/tools/neo4j-admin-tools/docker-compose.backup.yaml +++ b/tools/neo4j-admin-tools/docker-compose.backup.yaml @@ -3,7 +3,7 @@ version: "3.7" services: neo4j-online-backup: container_name: neo4j-online-backup - image: neo4j:3.3.4-enterprise + image: neo4j:3.4.7-enterprise command: > bin/neo4j-admin backup --backup-dir=/backup_dir diff --git a/tools/neo4j-admin-tools/docker-compose.neo4j.yaml b/tools/neo4j-admin-tools/docker-compose.neo4j.yaml index e8eacb468..4258e14aa 100644 --- a/tools/neo4j-admin-tools/docker-compose.neo4j.yaml +++ b/tools/neo4j-admin-tools/docker-compose.neo4j.yaml @@ -3,7 +3,7 @@ version: "3.7" services: neo4j: container_name: "neo4j" - image: neo4j:3.4.4 + image: neo4j:3.4.7 environment: - NEO4J_AUTH=none ports: diff --git a/tools/neo4j-admin-tools/docker-compose.restore.yaml b/tools/neo4j-admin-tools/docker-compose.restore.yaml index be5183c34..685876573 100644 --- a/tools/neo4j-admin-tools/docker-compose.restore.yaml +++ b/tools/neo4j-admin-tools/docker-compose.restore.yaml @@ -3,7 +3,7 @@ version: "3.7" services: neo4j-online-restore: container_name: neo4j-online-restore - image: neo4j:3.3.4-enterprise + image: neo4j:3.4.7-enterprise command: > bin/neo4j-admin restore --from=/backup_dir/graph.db-backup diff --git a/tools/neo4j-admin-tools/docker-compose.yaml b/tools/neo4j-admin-tools/docker-compose.yaml new file mode 100644 index 000000000..8be1a6627 --- /dev/null +++ b/tools/neo4j-admin-tools/docker-compose.yaml @@ -0,0 +1,12 @@ +version: "3.7" + +services: + neo4j: + container_name: "neo4j" + image: neo4j:3.4.7 + environment: + - NEO4J_AUTH=none + ports: + - "7687:7687" + - "7474:7474" + tmpfs: "/data" diff --git 
a/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/MainKt.kt b/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/MainKt.kt index 1b4f54e12..bb57ed0bd 100644 --- a/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/MainKt.kt +++ b/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/MainKt.kt @@ -6,6 +6,7 @@ import java.nio.file.Paths fun main(args: Array) { neo4jExporterToCypherFile() + // cypherFileToNeo4jImporter() } fun neo4jExporterToCypherFile() { @@ -39,7 +40,7 @@ fun cypherFileToNeo4jImporter() { println("Import from file to Neo4j") - importFromCypherFile("src/main/resources/init.cypher") { query -> + importFromCypherFile("src/main/resources/backup.prod.cypher") { query -> txn.run(query) } diff --git a/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/Neo4jExporter.kt b/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/Neo4jExporter.kt index 60542b9b8..914ff70f0 100644 --- a/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/Neo4jExporter.kt +++ b/tools/neo4j-admin-tools/src/main/kotlin/org/ostelco/tools/migration/Neo4jExporter.kt @@ -41,7 +41,9 @@ fun importFromNeo4j(txn: Transaction, handleCypher: (String) -> Unit) { sb.append( """ -MATCH (n:${fromNode.labels().first()} {id: '${fromNode.get("id")}'}), (m:${toNode.labels().first()} {id: '${toNode.get("id")}'}) +MATCH (n:${fromNode.labels().first()} {id: '${fromNode.asMap()["id"]}'}) + WITH n +MATCH (m:${toNode.labels().first()} {id: '${toNode.asMap()["id"]}'}) CREATE (n)-[:$type$props]->(m); """) } diff --git a/tools/neo4j-admin-tools/src/main/resources/.gitignore b/tools/neo4j-admin-tools/src/main/resources/.gitignore index 3cb2ee750..82b1ba983 100644 --- a/tools/neo4j-admin-tools/src/main/resources/.gitignore +++ b/tools/neo4j-admin-tools/src/main/resources/.gitignore @@ -1,3 +1,5 @@ prod.cypher test.cypher -backup.cypher \ No newline at end of file +backup.cypher +backup.dev.cypher 
+backup.prod.cypher \ No newline at end of file diff --git a/tools/neo4j-admin-tools/src/main/resources/docker-compose.yaml b/tools/neo4j-admin-tools/src/main/resources/docker-compose.yaml index c3b46e20f..e8f1e59fa 100644 --- a/tools/neo4j-admin-tools/src/main/resources/docker-compose.yaml +++ b/tools/neo4j-admin-tools/src/main/resources/docker-compose.yaml @@ -3,7 +3,7 @@ version: "3.7" services: neo4j: container_name: "neo4j" - image: neo4j:3.4.4-enterprise + image: neo4j:3.4.7-enterprise environment: - NEO4J_AUTH=none - NEO4J_ACCEPT_LICENSE_AGREEMENT=yes From 1d34ca51f865e516d73edbb0049d7fd8e597c84e Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Mon, 10 Sep 2018 17:19:48 +0200 Subject: [PATCH 59/78] Add revenue for last 24 hours --- bq-metrics-extractor/config/config.yaml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/bq-metrics-extractor/config/config.yaml b/bq-metrics-extractor/config/config.yaml index 2201a8417..394d1239e 100644 --- a/bq-metrics-extractor/config/config.yaml +++ b/bq-metrics-extractor/config/config.yaml @@ -17,4 +17,12 @@ bqmetrics: sql: > SELECT count(distinct user_pseudo_id) AS count FROM `pantel-2decb.analytics_160712959.events_*` WHERE event_name = "first_open" - LIMIT 1000 \ No newline at end of file + LIMIT 1000 + - type: summary + name: revenue_last24hours + help: Revenue for last 24 hours + resultColumn: count + sql: > + SELECT SUM(product.price.amount) as revenue FROM `pantel-2decb.purchases.raw_purchases` + WHERE TIMESTAMP_MILLIS(timestamp) > TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 24 HOUR) + AND TIMESTAMP_MILLIS(timestamp) < CURRENT_TIMESTAMP() From bc7898497a098b1df584fcc8daf3418d6a850de7 Mon Sep 17 00:00:00 2001 From: "Kjell M. 
Myksvoll" Date: Mon, 10 Sep 2018 17:19:52 +0200 Subject: [PATCH 60/78] Adds 'total data used last 24h' metrics to BQ extractor --- bq-metrics-extractor/config/config.yaml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/bq-metrics-extractor/config/config.yaml b/bq-metrics-extractor/config/config.yaml index 2c8d82d13..91335c807 100644 --- a/bq-metrics-extractor/config/config.yaml +++ b/bq-metrics-extractor/config/config.yaml @@ -20,8 +20,15 @@ bqmetrics: LIMIT 1000 - type: summary name: sims_who_have_used_data - help: Number of SIMs that har used data last 24 hours + help: Number of SIMs that have used data last 24 hours resultColumn: count sql: > SELECT count(DISTINCT msisdn) FROM `pantel-2decb.data_consumption.raw_consumption` WHERE timestamp >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 DAY) + - type: summary + name: total_data_used + help: Total data used last 24 hours + resultColumn: count + sql: > + SELECT sum(bucketBytes) FROM `pantel-2decb.data_consumption.raw_consumption` + WHERE timestamp >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 DAY) From 3d915a6d1b73fc5afd6bd68d69efa8cd46b33eed Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Mon, 10 Sep 2018 17:23:46 +0200 Subject: [PATCH 61/78] Fix the column name --- bq-metrics-extractor/config/config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bq-metrics-extractor/config/config.yaml b/bq-metrics-extractor/config/config.yaml index 394d1239e..ab4a5d59c 100644 --- a/bq-metrics-extractor/config/config.yaml +++ b/bq-metrics-extractor/config/config.yaml @@ -21,7 +21,7 @@ bqmetrics: - type: summary name: revenue_last24hours help: Revenue for last 24 hours - resultColumn: count + resultColumn: revenue sql: > SELECT SUM(product.price.amount) as revenue FROM `pantel-2decb.purchases.raw_purchases` WHERE TIMESTAMP_MILLIS(timestamp) > TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 24 HOUR) AND TIMESTAMP_MILLIS(timestamp) < CURRENT_TIMESTAMP() From 67240650ef814a75619b3cc7cec937ec62b842d4 Mon Sep 17 00:00:00 2001 From: 
Martin Cederlof Date: Tue, 11 Sep 2018 10:28:03 +0200 Subject: [PATCH 62/78] WIP PaymentError Refactoring payment API to us its own error codes. --- .../client/api/store/SubscriberDAOImpl.kt | 10 ++-- .../ostelco/prime/storage/graph/Neo4jStore.kt | 17 +++--- .../StripePaymentProcessor.kt | 59 ++++++++----------- .../paymentprocessor/PaymentProcessor.kt | 38 ++++++------ .../paymentprocessor/core/PaymentError.kt | 19 ++++++ .../org/ostelco/prime/storage/Variants.kt | 4 +- 6 files changed, 77 insertions(+), 70 deletions(-) create mode 100644 prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt index 7f5c69e4e..dd6c97f4e 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt @@ -1,7 +1,6 @@ package org.ostelco.prime.client.api.store import arrow.core.Either -import arrow.core.Tuple4 import arrow.core.flatMap import org.ostelco.prime.analytics.AnalyticsService import org.ostelco.prime.analytics.PrimeMetric.REVENUE @@ -238,7 +237,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu subscriberId, sku, sourceId, - saveCard) + saveCard).mapLeft { NotFoundError(it.description) } override fun getReferrals(subscriberId: String): Either> { return try { @@ -302,7 +301,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu { createAndStorePaymentProfile(subscriberId) }, { profileInfo -> Either.right(profileInfo) } ) - .flatMap { profileInfo -> paymentProcessor.addSource(profileInfo.id, sourceId) } + .flatMap { profileInfo -> paymentProcessor.addSource(profileInfo.id, sourceId).mapLeft { NotFoundError(it.description) } } } override fun setDefaultSource(subscriberId: String, sourceId: 
String): Either { @@ -311,7 +310,7 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu { createAndStorePaymentProfile(subscriberId) }, { profileInfo -> Either.right(profileInfo) } ) - .flatMap { profileInfo -> paymentProcessor.setDefaultSource(profileInfo.id, sourceId) } + .flatMap { profileInfo -> paymentProcessor.setDefaultSource(profileInfo.id, sourceId).mapLeft { NotFoundError(it.description) } } } override fun listSources(subscriberId: String): Either> { @@ -320,7 +319,8 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu { createAndStorePaymentProfile(subscriberId) }, { profileInfo -> Either.right(profileInfo) } ) - .flatMap { profileInfo -> paymentProcessor.getSavedSources(profileInfo.id) } + .flatMap { profileInfo -> paymentProcessor.getSavedSources(profileInfo.id).mapLeft { NotFoundError(it.description) } } + } diff --git a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt index b05734fe7..08cbc4451 100644 --- a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt +++ b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt @@ -8,7 +8,6 @@ import org.ostelco.prime.analytics.AnalyticsService import org.ostelco.prime.analytics.PrimeMetric.REVENUE import org.ostelco.prime.analytics.PrimeMetric.USERS_PAID_AT_LEAST_ONCE import org.ostelco.prime.core.ApiError -import org.ostelco.prime.core.BadGatewayError import org.ostelco.prime.logger import org.ostelco.prime.model.Bundle import org.ostelco.prime.model.Offer @@ -22,6 +21,8 @@ import org.ostelco.prime.module.getResource import org.ostelco.prime.ocs.OcsAdminService import org.ostelco.prime.ocs.OcsSubscriberService import org.ostelco.prime.paymentprocessor.PaymentProcessor +import org.ostelco.prime.paymentprocessor.core.BadGatewayError +import org.ostelco.prime.paymentprocessor.core.PaymentError import 
org.ostelco.prime.paymentprocessor.core.ProductInfo import org.ostelco.prime.paymentprocessor.core.ProfileInfo import org.ostelco.prime.storage.DocumentStore @@ -330,12 +331,12 @@ object Neo4jStoreSingleton : GraphStore { private val ocs by lazy { getResource() } private val analyticsReporter by lazy { getResource() } - private fun getPaymentProfile(name: String): Either = + private fun getPaymentProfile(name: String): Either = documentStore.getPaymentId(name) ?.let { profileInfoId -> Either.right(ProfileInfo(profileInfoId)) } ?: Either.left(BadGatewayError("Failed to fetch payment customer ID")) - private fun createAndStorePaymentProfile(name: String): Either { + private fun createAndStorePaymentProfile(name: String): Either { return paymentProcessor.createPaymentProfile(name) .flatMap { profileInfo -> setPaymentProfile(name, profileInfo) @@ -343,7 +344,7 @@ object Neo4jStoreSingleton : GraphStore { } } - private fun setPaymentProfile(name: String, profileInfo: ProfileInfo): Either = + private fun setPaymentProfile(name: String, profileInfo: ProfileInfo): Either = Either.cond( test = documentStore.createPaymentId(name, profileInfo.id), ifTrue = { Unit }, @@ -353,11 +354,11 @@ object Neo4jStoreSingleton : GraphStore { subscriberId: String, sku: String, sourceId: String?, - saveCard: Boolean): Either = writeTransaction { + saveCard: Boolean): Either = writeTransaction { val result = getProduct(subscriberId, sku, transaction) // If we can't find the product, return not-found - .mapLeft { org.ostelco.prime.core.NotFoundError("Product unavailable") } + .mapLeft { org.ostelco.prime.paymentprocessor.core.NotFoundError("Product unavailable") } .flatMap { product: Product -> // Fetch/Create stripe payment profile for the subscriber. 
getPaymentProfile(subscriberId) @@ -431,9 +432,9 @@ object Neo4jStoreSingleton : GraphStore { // Remove the payment source if (!saveCard && savedSourceId != null) { paymentProcessor.removeSource(profileInfo.id, savedSourceId) - .mapLeft { apiError -> + .mapLeft { paymentError -> logger.error("Failed to remove card, for customerId ${profileInfo.id}, sourceId $sourceId") - apiError + paymentError } } } diff --git a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt index 76837bb6f..46e0d1efc 100644 --- a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt +++ b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt @@ -7,26 +7,16 @@ import com.stripe.model.Customer import com.stripe.model.Plan import com.stripe.model.Product import com.stripe.model.Subscription -import org.ostelco.prime.core.ApiError -import org.ostelco.prime.core.BadGatewayError -import org.ostelco.prime.core.ForbiddenError -import org.ostelco.prime.core.NotFoundError import org.ostelco.prime.logger -import org.ostelco.prime.paymentprocessor.core.PlanInfo -import org.ostelco.prime.paymentprocessor.core.ProductInfo -import org.ostelco.prime.paymentprocessor.core.ProfileInfo -import org.ostelco.prime.paymentprocessor.core.SourceInfo -import org.ostelco.prime.paymentprocessor.core.SubscriptionInfo import com.stripe.model.Refund -import java.util.HashMap - +import org.ostelco.prime.paymentprocessor.core.* class StripePaymentProcessor : PaymentProcessor { private val logger by logger() - override fun getSavedSources(customerId: String): Either> = + override fun getSavedSources(customerId: String): Either> = either(NotFoundError("Failed to get sources for customer $customerId")) { val sources = mutableListOf() val customer = Customer.retrieve(customerId) @@ -36,13 +26,13 @@ class 
StripePaymentProcessor : PaymentProcessor { sources } - override fun createPaymentProfile(userEmail: String): Either = + override fun createPaymentProfile(userEmail: String): Either = either(ForbiddenError("Failed to create profile for user $userEmail")) { val customerParams = mapOf("email" to userEmail) ProfileInfo(Customer.create(customerParams).id) } - override fun createPlan(productId: String, amount: Int, currency: String, interval: PaymentProcessor.Interval): Either = + override fun createPlan(productId: String, amount: Int, currency: String, interval: PaymentProcessor.Interval): Either = either(ForbiddenError("Failed to create plan with product id $productId amount $amount currency $currency interval ${interval.value}")) { val planParams = mapOf( "amount" to amount, @@ -52,13 +42,13 @@ class StripePaymentProcessor : PaymentProcessor { PlanInfo(Plan.create(planParams).id) } - override fun removePlan(planId: String): Either = + override fun removePlan(planId: String): Either = either(NotFoundError("Failed to delete plan $planId")) { val plan = Plan.retrieve(planId) PlanInfo(plan.delete().id) } - override fun createProduct(sku: String): Either = + override fun createProduct(sku: String): Either = either(ForbiddenError("Failed to create product with sku $sku")) { val productParams = mapOf( "name" to sku, @@ -66,39 +56,39 @@ class StripePaymentProcessor : PaymentProcessor { ProductInfo(Product.create(productParams).id) } - override fun removeProduct(productId: String): Either = + override fun removeProduct(productId: String): Either = either(NotFoundError("Failed to delete product $productId")) { val product = Product.retrieve(productId) ProductInfo(product.delete().id) } - override fun addSource(customerId: String, sourceId: String): Either = - either(ForbiddenError("Failed to add source $sourceId to customer $customerId")) { + override fun addSource(customerId: String, sourceId: String): Either = + either(NotFoundError("Failed to add source $sourceId to 
customer $customerId")) { val customer = Customer.retrieve(customerId) val params = mapOf("source" to sourceId) SourceInfo(customer.sources.create(params).id) } - override fun setDefaultSource(customerId: String, sourceId: String): Either = - either(ForbiddenError("Failed to set default source $sourceId for customer $customerId")) { + override fun setDefaultSource(customerId: String, sourceId: String): Either = + either(NotFoundError("Failed to set default source $sourceId for customer $customerId")) { val customer = Customer.retrieve(customerId) val updateParams = mapOf("default_source" to sourceId) val customerUpdated = customer.update(updateParams) SourceInfo(customerUpdated.defaultSource) } - override fun getDefaultSource(customerId: String): Either = + override fun getDefaultSource(customerId: String): Either = either(NotFoundError("Failed to get default source for customer $customerId")) { SourceInfo(Customer.retrieve(customerId).defaultSource) } - override fun deletePaymentProfile(customerId: String): Either = + override fun deletePaymentProfile(customerId: String): Either = either(NotFoundError("Failed to delete customer $customerId")) { val customer = Customer.retrieve(customerId) ProfileInfo(customer.delete().id) } - override fun subscribeToPlan(planId: String, customerId: String): Either = + override fun subscribeToPlan(planId: String, customerId: String): Either = either(ForbiddenError("Failed to subscribe customer $customerId to plan $planId")) { val item = mapOf("plan" to planId) val params = mapOf( @@ -108,15 +98,15 @@ class StripePaymentProcessor : PaymentProcessor { SubscriptionInfo(Subscription.create(params).id) } - override fun cancelSubscription(subscriptionId: String, atIntervalEnd: Boolean): Either = - either(ForbiddenError("Failed to unsubscribe subscription Id : $subscriptionId atIntervalEnd $atIntervalEnd")) { + override fun cancelSubscription(subscriptionId: String, atIntervalEnd: Boolean): Either = + either(NotFoundError("Failed to 
unsubscribe subscription Id : $subscriptionId atIntervalEnd $atIntervalEnd")) { val subscription = Subscription.retrieve(subscriptionId) val subscriptionParams = mapOf("at_period_end" to atIntervalEnd) SubscriptionInfo(subscription.cancel(subscriptionParams).id) } - override fun authorizeCharge(customerId: String, sourceId: String?, amount: Int, currency: String): Either { + override fun authorizeCharge(customerId: String, sourceId: String?, amount: Int, currency: String): Either { val errorMessage = "Failed to authorize the charge for customerId $customerId sourceId $sourceId amount $amount currency $currency" return either(ForbiddenError(errorMessage)) { val chargeParams = mutableMapOf( @@ -138,9 +128,9 @@ class StripePaymentProcessor : PaymentProcessor { } } - override fun captureCharge(chargeId: String, customerId: String): Either { + override fun captureCharge(chargeId: String, customerId: String): Either { val errorMessage = "Failed to capture charge for customerId $customerId chargeId $chargeId" - return either(ForbiddenError(errorMessage)) { + return either(NotFoundError(errorMessage)) { Charge.retrieve(chargeId) }.flatMap { charge: Charge -> val review = charge.review @@ -160,23 +150,24 @@ class StripePaymentProcessor : PaymentProcessor { } } - override fun refundCharge(chargeId: String): Either = + override fun refundCharge(chargeId: String): Either = either(NotFoundError("Failed to refund charge $chargeId")) { val refundParams = mapOf("charge" to chargeId) Refund.create(refundParams).charge } - override fun removeSource(customerId: String, sourceId: String): Either = + override fun removeSource(customerId: String, sourceId: String): Either = either(ForbiddenError("Failed to remove source $sourceId from customer $customerId")) { Customer.retrieve(customerId).sources.retrieve(sourceId).delete().id } - private fun either(apiError: ApiError, action: () -> RETURN): Either { + private fun either(paymentError: PaymentError, action: () -> RETURN): Either { 
return try { Either.right(action()) } catch (e: Exception) { - logger.warn(apiError.description, e) - Either.left(apiError) + logger.warn(paymentError.description, e) + Either.left(paymentError) } } } + diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt index 4948d8ae2..78d27867c 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt +++ b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/PaymentProcessor.kt @@ -2,11 +2,7 @@ package org.ostelco.prime.paymentprocessor import arrow.core.Either import org.ostelco.prime.core.ApiError -import org.ostelco.prime.paymentprocessor.core.PlanInfo -import org.ostelco.prime.paymentprocessor.core.ProductInfo -import org.ostelco.prime.paymentprocessor.core.ProfileInfo -import org.ostelco.prime.paymentprocessor.core.SourceInfo -import org.ostelco.prime.paymentprocessor.core.SubscriptionInfo +import org.ostelco.prime.paymentprocessor.core.* interface PaymentProcessor { @@ -22,19 +18,19 @@ interface PaymentProcessor { * @param sourceId Stripe source id * @return Stripe sourceId if created */ - fun addSource(customerId: String, sourceId: String): Either + fun addSource(customerId: String, sourceId: String): Either /** * @param userEmail: user email (Prime unique identifier for customer) * @return Stripe customerId if created */ - fun createPaymentProfile(userEmail: String): Either + fun createPaymentProfile(userEmail: String): Either /** * @param customerId Stripe customer id * @return Stripe customerId if deleted */ - fun deletePaymentProfile(customerId: String): Either + fun deletePaymentProfile(customerId: String): Either /** * @param productId Stripe product id @@ -43,58 +39,58 @@ interface PaymentProcessor { * @param interval The frequency with which a subscription should be billed. 
* @return Stripe planId if created */ - fun createPlan(productId: String, amount: Int, currency: String, interval: Interval): Either + fun createPlan(productId: String, amount: Int, currency: String, interval: Interval): Either /** * @param Stripe Plan Id * @param Stripe Customer Id * @return Stripe SubscriptionId if subscribed */ - fun subscribeToPlan(planId: String, customerId: String): Either + fun subscribeToPlan(planId: String, customerId: String): Either /** * @param Stripe Plan Id * @return Stripe PlanId if deleted */ - fun removePlan(planId: String): Either + fun removePlan(planId: String): Either /** * @param Stripe Subscription Id * @param Stripe atIntervalEnd set to true if the subscription shall remain active until the end of the Plan interval * @return Stripe SubscriptionId if unsubscribed */ - fun cancelSubscription(subscriptionId: String, atIntervalEnd: Boolean = true): Either + fun cancelSubscription(subscriptionId: String, atIntervalEnd: Boolean = true): Either /** * @param sku Prime product SKU * @return Stripe productId if created */ - fun createProduct(sku: String): Either + fun createProduct(sku: String): Either /** * @param productId Stripe product Id * @return Stripe productId if removed */ - fun removeProduct(productId: String): Either + fun removeProduct(productId: String): Either /** * @param customerId Stripe customer id * @return List of Stripe sourceId */ - fun getSavedSources(customerId: String): Either> + fun getSavedSources(customerId: String): Either> /** * @param customerId Stripe customer id * @return Stripe default sourceId */ - fun getDefaultSource(customerId: String): Either + fun getDefaultSource(customerId: String): Either /** * @param customerId Stripe customer id * @param sourceId Stripe source id * @return SourceInfo if created */ - fun setDefaultSource(customerId: String, sourceId: String): Either + fun setDefaultSource(customerId: String, sourceId: String): Either /** * @param customerId Customer id in the payment system 
@@ -103,26 +99,26 @@ interface PaymentProcessor { * @param currency Three-letter ISO currency code in lowercase * @return id of the charge if authorization was successful */ - fun authorizeCharge(customerId: String, sourceId: String?, amount: Int, currency: String): Either + fun authorizeCharge(customerId: String, sourceId: String?, amount: Int, currency: String): Either /** * @param chargeId ID of the of the authorized charge from authorizeCharge() * @param customerId Customer id in the payment system * @return id of the charge if authorization was successful */ - fun captureCharge(chargeId: String, customerId: String): Either + fun captureCharge(chargeId: String, customerId: String): Either /** * @param chargeId ID of the of the authorized charge to refund from authorizeCharge() * @return id of the charge */ - fun refundCharge(chargeId: String): Either + fun refundCharge(chargeId: String): Either /** * @param customerId Customer id in the payment system * @param sourceId id of the payment source * @return id if removed */ - fun removeSource(customerId: String, sourceId: String): Either + fun removeSource(customerId: String, sourceId: String): Either } \ No newline at end of file diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt new file mode 100644 index 000000000..ffc764b0f --- /dev/null +++ b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt @@ -0,0 +1,19 @@ +package org.ostelco.prime.paymentprocessor.core + +import javax.ws.rs.core.Response + +sealed class PaymentError(val description: String) { + open var status : Int = 0 +} + +class ForbiddenError(description: String) : PaymentError(description) { + override var status : Int = Response.Status.NOT_FOUND.getStatusCode() +} + +class NotFoundError(description: String) : PaymentError(description) { + override var status : Int = 
Response.Status.NOT_FOUND.getStatusCode() +} + +class BadGatewayError(description: String) : PaymentError(description) { + override var status : Int = Response.Status.BAD_REQUEST.getStatusCode() +} \ No newline at end of file diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/storage/Variants.kt b/prime-api/src/main/kotlin/org/ostelco/prime/storage/Variants.kt index 06c2bbe0b..c4b0d491d 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/storage/Variants.kt +++ b/prime-api/src/main/kotlin/org/ostelco/prime/storage/Variants.kt @@ -1,7 +1,6 @@ package org.ostelco.prime.storage import arrow.core.Either -import org.ostelco.prime.core.ApiError import org.ostelco.prime.model.ApplicationToken import org.ostelco.prime.model.Bundle import org.ostelco.prime.model.Offer @@ -11,6 +10,7 @@ import org.ostelco.prime.model.PurchaseRecord import org.ostelco.prime.model.Segment import org.ostelco.prime.model.Subscriber import org.ostelco.prime.model.Subscription +import org.ostelco.prime.paymentprocessor.core.PaymentError import org.ostelco.prime.paymentprocessor.core.ProductInfo interface ClientDocumentStore { @@ -124,7 +124,7 @@ interface ClientGraphStore { /** * Temporary method to perform purchase as atomic transaction */ - fun purchaseProduct(subscriberId: String, sku: String, sourceId: String?, saveCard: Boolean): Either + fun purchaseProduct(subscriberId: String, sku: String, sourceId: String?, saveCard: Boolean): Either } interface AdminGraphStore { From f9d30c4e94e17f87f1ef86d139b9dfc52f9437f5 Mon Sep 17 00:00:00 2001 From: havard Date: Tue, 11 Sep 2018 10:43:14 +0200 Subject: [PATCH 63/78] feat: update readme about monitoring --- prime/infra/MONITORING.md | 1 + prime/infra/README.md | 23 +++++++++++- prime/infra/dev/monitoring.yaml | 63 +++++++++++++++++++++++---------- 3 files changed, 68 insertions(+), 19 deletions(-) diff --git a/prime/infra/MONITORING.md b/prime/infra/MONITORING.md index fd6e59598..97e54c81b 100644 --- a/prime/infra/MONITORING.md +++ 
b/prime/infra/MONITORING.md @@ -79,6 +79,7 @@ nodes, endpoints, services, pods Given that they set the required annotations which tells prometheus that they should be scaped, see below: +`add the annotations to the pods, that means the config with type Deployment, StatefulSet, DaemonSet` ```yaml metadata: annotations: diff --git a/prime/infra/README.md b/prime/infra/README.md index 823d161a0..cf8865c30 100644 --- a/prime/infra/README.md +++ b/prime/infra/README.md @@ -269,13 +269,34 @@ gcloud endpoints services deploy prime/infra/dev/prime-client-api.yaml ## Deploy to Dev cluster ### Deploy monitoring -Based on https://github.com/giantswarm/kubernetes-prometheus ```bash kubectl apply -f prime/infra/dev/monitoring.yaml + +# kubectl apply -f prime/infra/dev/monitoring-pushgateway.yaml ``` +#### Prometheus dashboard +```bash +kubectl port-forward --namespace=monitoring $(kubectl get pods --namespace=monitoring | grep prometheus-core | awk '{print $1}') 9090 +``` + +#### Grafana dashboard +__`Has its own load balancer and can be accessed directly. 
Discuss if this is OK or find and implement a different way of accessing the grafana dashboard.`__ + +Can be accessed directly from external ip +```bash +kubectl get services --namespace=monitoring | grep grafana | awk '{print $4}' +``` + +#### Push gateway +```bash +# Push a metric to pushgateway:8080 (specified in the service declaration for pushgateway) +kubectl run curl-it --image=radial/busyboxplus:curl -i --tty --rm +echo "some_metric 4.71" | curl -v --data-binary @- http://pushgateway:8080/metrics/job/some_job +``` + ### Setup Neo4j ```bash diff --git a/prime/infra/dev/monitoring.yaml b/prime/infra/dev/monitoring.yaml index cbcf2a979..0bbe6401a 100644 --- a/prime/infra/dev/monitoring.yaml +++ b/prime/infra/dev/monitoring.yaml @@ -1,4 +1,5 @@ -# Derived from ./manifests +# Based on https://github.com/giantswarm/kubernetes-prometheus +# According to Praqma this is an outdated way to setup prometheus stack --- apiVersion: v1 kind: Namespace @@ -230,7 +231,7 @@ data: smtp_auth_password: 'barfoo' # The API URL to use for Slack notifications. - slack_api_url: 'https://hooks.slack.com/services/some/api/token' + slack_api_url: 'https://hooks.slack.com/services/insert-secret-token-here/' # # The directory from which notification templates are read. 
templates: @@ -309,8 +310,16 @@ spec: name: alertmanager labels: app: alertmanager + triggerUpdate: "3" spec: containers: + - name: watch + image: weaveworks/watch:master-5b2a6e5 + imagePullPolicy: IfNotPresent + args: ["-v", "-t", "-p=/etc/alertmanager", "curl", "-X", "POST", "--fail", "-o", "-", "-sS", "http://alertmanager:9093/-/reload"] + volumeMounts: + - name: config-volume + mountPath: /etc/alertmanager - name: alertmanager image: quay.io/prometheus/alertmanager:v0.7.1 args: @@ -2438,6 +2447,21 @@ data: - source_labels: [__meta_kubernetes_service_name] target_label: kubernetes_name + - job_name: 'kubernetes-cadvisor' + metrics_path: /metrics/cadvisor + #metrics_path: /cadvisor + tls_config: + ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt + bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token + kubernetes_sd_configs: + - role: node + relabel_configs: + - source_labels: [__address__] + regex: '(.*):10250' + replacement: '${1}:10255' + #replacement: /api/v1/nodes/${1}/proxy/metrics/cadvisor + target_label: __address__ + # https://github.com/prometheus/prometheus/blob/master/documentation/examples/prometheus-kubernetes.yml#L156 - job_name: 'kubernetes-pods' kubernetes_sd_configs: @@ -2480,7 +2504,12 @@ metadata: labels: app: prometheus component: core + triggerUpdate: "2" spec: + strategy: + rollingUpdate: + maxSurge: 1 + type: RollingUpdate replicas: 1 template: metadata: @@ -2491,6 +2520,13 @@ spec: spec: serviceAccountName: prometheus-k8s containers: + - name: watch + image: weaveworks/watch:master-5b2a6e5 + imagePullPolicy: IfNotPresent + args: ["-v", "-t", "-p=/etc/prometheus-rules", "curl", "-X", "POST", "--fail", "-o", "-", "-sS", "--max-time", "3602", "http://prometheus:9090/-/reload"] + volumeMounts: + - name: config-volume + mountPath: /etc/prometheus-rules - name: prometheus image: prom/prometheus:v1.7.0 args: @@ -2723,7 +2759,7 @@ apiVersion: v1 data: cpu-usage.rules: | ALERT NodeCPUUsage - IF (100 - (avg by 
(instance) (irate(node_cpu{name="node-exporter",mode="idle"}[5m])) * 100)) > 75 + IF (100 - (avg by (instance) (irate(node_cpu{kubernetes_name="prometheus-node-exporter",mode="idle"}[5m])) * 100)) > 75 FOR 2m LABELS { severity="page" @@ -2742,30 +2778,20 @@ data: description = "{{ $labels.instance }} of job {{ $labels.job }} has been down for more than 1 minute.", } low-disk-space.rules: | - ALERT NodeLowRootDisk - IF ((node_filesystem_size{mountpoint="/root-disk"} - node_filesystem_free{mountpoint="/root-disk"} ) / node_filesystem_size{mountpoint="/root-disk"} * 100) > 75 - FOR 2m - LABELS { - severity="page" - } - ANNOTATIONS { - SUMMARY = "{{$labels.instance}}: Low root disk space", - DESCRIPTION = "{{$labels.instance}}: Root disk usage is above 75% (current value is: {{ $value }})" - } - ALERT NodeLowDataDisk - IF ((node_filesystem_size{mountpoint="/data-disk"} - node_filesystem_free{mountpoint="/data-disk"} ) / node_filesystem_size{mountpoint="/data-disk"} * 100) > 75 + ALERT NodeLowDisk + IF ((node_filesystem_size{mountpoint="/"} - node_filesystem_free{mountpoint="/"} ) / node_filesystem_size{mountpoint="/"} * 100) > 75 FOR 2m LABELS { severity="page" } ANNOTATIONS { - SUMMARY = "{{$labels.instance}}: Low data disk space", - DESCRIPTION = "{{$labels.instance}}: Data disk usage is above 75% (current value is: {{ $value }})" + SUMMARY = "{{$labels.instance}}: Low disk space", + DESCRIPTION = "{{$labels.instance}}: Disk usage is above 75% (current value is: {{ $value }})" } mem-usage.rules: | ALERT NodeSwapUsage - IF (((node_memory_SwapTotal-node_memory_SwapFree)/node_memory_SwapTotal)*100) > 75 + IF (((node_memory_SwapTotal-node_memory_SwapFree)/node_memory_SwapTotal)*100) > 7 FOR 2m LABELS { severity="page" @@ -2815,6 +2841,7 @@ rules: - services - endpoints - pods + - nodes/metrics verbs: ["get", "list", "watch"] - apiGroups: [""] resources: From f2cb7060aad2c6e875d2c201f7e9ddaa12877c03 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Tue, 11 Sep 2018 
10:44:25 +0200 Subject: [PATCH 64/78] Rename cronjob deployment file - Also remove the limit in query --- bq-metrics-extractor/README.md | 2 +- bq-metrics-extractor/config/config.yaml | 1 - bq-metrics-extractor/cronjob/{config.yaml => extractor.yaml} | 0 3 files changed, 1 insertion(+), 2 deletions(-) rename bq-metrics-extractor/cronjob/{config.yaml => extractor.yaml} (100%) diff --git a/bq-metrics-extractor/README.md b/bq-metrics-extractor/README.md index 7eece4ec1..ef069b2b1 100644 --- a/bq-metrics-extractor/README.md +++ b/bq-metrics-extractor/README.md @@ -73,7 +73,7 @@ How to build and deploy the cronjob manually ... where foobarbaz is the id of the container built by docker build. ## Then start the cronjob in kubernetes - kubectl apply -f cronjob/config.yaml + kubectl apply -f cronjob/extractor.yaml kubectl describe cronjob bq-metrics-extractor ## To talk to the prometheus in the monitoring namespace & watch the users metrics evolve diff --git a/bq-metrics-extractor/config/config.yaml b/bq-metrics-extractor/config/config.yaml index ab4a5d59c..2b3770e7e 100644 --- a/bq-metrics-extractor/config/config.yaml +++ b/bq-metrics-extractor/config/config.yaml @@ -17,7 +17,6 @@ bqmetrics: sql: > SELECT count(distinct user_pseudo_id) AS count FROM `pantel-2decb.analytics_160712959.events_*` WHERE event_name = "first_open" - LIMIT 1000 - type: summary name: revenue_last24hours help: Revenue for last 24 hours diff --git a/bq-metrics-extractor/cronjob/config.yaml b/bq-metrics-extractor/cronjob/extractor.yaml similarity index 100% rename from bq-metrics-extractor/cronjob/config.yaml rename to bq-metrics-extractor/cronjob/extractor.yaml From f98a9fbf049f1ed88e7f5b54abb95f9175b347c5 Mon Sep 17 00:00:00 2001 From: "Kjell M. 
Myksvoll" Date: Tue, 11 Sep 2018 11:19:09 +0200 Subject: [PATCH 65/78] Add missing column names to SQL metrics expressions --- bq-metrics-extractor/config/config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bq-metrics-extractor/config/config.yaml b/bq-metrics-extractor/config/config.yaml index 165c47955..fb3b71861 100644 --- a/bq-metrics-extractor/config/config.yaml +++ b/bq-metrics-extractor/config/config.yaml @@ -23,14 +23,14 @@ bqmetrics: help: Number of SIMs that has used data last 24 hours resultColumn: count sql: > - SELECT count(DISTINCT msisdn) FROM `pantel-2decb.data_consumption.raw_consumption` + SELECT count(DISTINCT msisdn) AS count FROM `pantel-2decb.data_consumption.raw_consumption` WHERE timestamp >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 DAY) - type: summary name: total_data_used help: Total data used last 24 hours resultColumn: count sql: > - SELECT sum(bucketBytes) FROM `pantel-2decb.data_consumption.raw_consumption` + SELECT sum(bucketBytes) AS count FROM `pantel-2decb.data_consumption.raw_consumption` WHERE timestamp >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 DAY) name: revenue_last24hours help: Revenue for last 24 hours @@ -38,4 +38,4 @@ bqmetrics: sql: > SELECT SUM(product.price.amount) as revenue FROM `pantel-2decb.purchases.raw_purchases` WHERE TIMESTAMP_MILLIS(timestamp) > TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 24 HOUR) - AND TIMESTAMP_MILLIS(timestamp) < CURRENT_TIMESTAMP() \ No newline at end of file + AND TIMESTAMP_MILLIS(timestamp) < CURRENT_TIMESTAMP() From 626ad770809f23dda799d045b212bbf3e0a72660 Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Tue, 11 Sep 2018 11:28:11 +0200 Subject: [PATCH 66/78] Fix error code for createAndStorePaymantProfile --- .../org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt 
b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt index dd6c97f4e..d03a2f92b 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt @@ -190,9 +190,9 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu private fun createAndStorePaymentProfile(name: String): Either { return paymentProcessor.createPaymentProfile(name) + .mapLeft { ForbiddenError(it.description) } .flatMap { profileInfo -> - setPaymentProfile(name, profileInfo) - .map { profileInfo } + setPaymentProfile(name, profileInfo).map { profileInfo } } } From 9c9ee87191b47dbc5231def6400fc1e6ca3bdc1d Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Tue, 11 Sep 2018 11:46:43 +0200 Subject: [PATCH 67/78] keep original format --- .../org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt index d03a2f92b..6e617bfa6 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt @@ -192,7 +192,8 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu return paymentProcessor.createPaymentProfile(name) .mapLeft { ForbiddenError(it.description) } .flatMap { profileInfo -> - setPaymentProfile(name, profileInfo).map { profileInfo } + setPaymentProfile(name, profileInfo) + .map { profileInfo } } } From 0301fe4724b8d4f2fd1445023b82dacff9bb424c Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Tue, 11 Sep 2018 11:53:26 +0200 Subject: [PATCH 68/78] Fix status code --- .../org/ostelco/prime/paymentprocessor/core/PaymentError.kt | 6 +++--- 1 file 
changed, 3 insertions(+), 3 deletions(-) diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt index ffc764b0f..5267345ab 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt +++ b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt @@ -7,13 +7,13 @@ sealed class PaymentError(val description: String) { } class ForbiddenError(description: String) : PaymentError(description) { - override var status : Int = Response.Status.NOT_FOUND.getStatusCode() + override var status : Int = Response.Status.FORBIDDEN.statusCode } class NotFoundError(description: String) : PaymentError(description) { - override var status : Int = Response.Status.NOT_FOUND.getStatusCode() + override var status : Int = Response.Status.NOT_FOUND.statusCode } class BadGatewayError(description: String) : PaymentError(description) { - override var status : Int = Response.Status.BAD_REQUEST.getStatusCode() + override var status : Int = Response.Status.BAD_REQUEST.statusCode } \ No newline at end of file From a53b6b692e77cf06fda433b49c98bb38e8809376 Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Tue, 11 Sep 2018 13:21:34 +0200 Subject: [PATCH 69/78] Store Stripe error messages in PaymentError --- .../ostelco/prime/paymentprocessor/StripePaymentProcessor.kt | 3 ++- .../org/ostelco/prime/paymentprocessor/core/PaymentError.kt | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt index 46e0d1efc..60fbc9c31 100644 --- a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt +++ b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt 
@@ -17,7 +17,7 @@ class StripePaymentProcessor : PaymentProcessor { private val logger by logger() override fun getSavedSources(customerId: String): Either> = - either(NotFoundError("Failed to get sources for customer $customerId")) { + either(NotFoundError("Failed to retrieve sources for customer $customerId")) { val sources = mutableListOf() val customer = Customer.retrieve(customerId) customer.sources.data.forEach { @@ -165,6 +165,7 @@ class StripePaymentProcessor : PaymentProcessor { return try { Either.right(action()) } catch (e: Exception) { + paymentError.externalErrorMessage = e.message logger.warn(paymentError.description, e) Either.left(paymentError) } diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt index 5267345ab..c57ca30e6 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt +++ b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt @@ -4,6 +4,7 @@ import javax.ws.rs.core.Response sealed class PaymentError(val description: String) { open var status : Int = 0 + var externalErrorMessage : String? 
= "" } class ForbiddenError(description: String) : PaymentError(description) { From 5d628df915619a3d7caee1adfc99b2f50198962f Mon Sep 17 00:00:00 2001 From: havard Date: Tue, 11 Sep 2018 13:33:57 +0200 Subject: [PATCH 70/78] feat: add section in deployment of monitoring if creation of clusterroles fails --- prime/infra/README.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/prime/infra/README.md b/prime/infra/README.md index cf8865c30..6c8208b21 100644 --- a/prime/infra/README.md +++ b/prime/infra/README.md @@ -273,6 +273,10 @@ gcloud endpoints services deploy prime/infra/dev/prime-client-api.yaml ```bash kubectl apply -f prime/infra/dev/monitoring.yaml +# If the above command fails on creating clusterroles / clusterbindings you need to add a role to the user you are using to deploy +# You can read more about it here https://github.com/coreos/prometheus-operator/issues/357 +kubectl create clusterrolebinding cluster-admin-binding --clusterrole cluster-admin --user $(gcloud config get-value account) + # kubectl apply -f prime/infra/dev/monitoring-pushgateway.yaml ``` @@ -337,4 +341,4 @@ logName="projects/pantel-2decb/logs/prime" ## Connect using Neo4j Browser -Check [docs/NEO4J.md](../docs/NEO4J.md) \ No newline at end of file +Check [docs/NEO4J.md](../docs/NEO4J.md) From 6e130107d2feb5252939f2972f9502e6ea47380c Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Tue, 11 Sep 2018 13:51:45 +0200 Subject: [PATCH 71/78] init externalErrorMessage to null --- .../org/ostelco/prime/paymentprocessor/core/PaymentError.kt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt index c57ca30e6..42b2f3945 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt +++ 
b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/PaymentError.kt @@ -4,7 +4,7 @@ import javax.ws.rs.core.Response sealed class PaymentError(val description: String) { open var status : Int = 0 - var externalErrorMessage : String? = "" + var externalErrorMessage : String? = null } class ForbiddenError(description: String) : PaymentError(description) { From b81196c07b9846e23ba606303709976128f4c3a3 Mon Sep 17 00:00:00 2001 From: havard Date: Tue, 11 Sep 2018 14:21:11 +0200 Subject: [PATCH 72/78] feat: remove alertmanager from monitoring setup --- prime/infra/dev/monitoring.yaml | 360 -------------------------------- 1 file changed, 360 deletions(-) diff --git a/prime/infra/dev/monitoring.yaml b/prime/infra/dev/monitoring.yaml index 0bbe6401a..12376307d 100644 --- a/prime/infra/dev/monitoring.yaml +++ b/prime/infra/dev/monitoring.yaml @@ -6,365 +6,6 @@ kind: Namespace metadata: name: monitoring --- -apiVersion: v1 -data: - default.tmpl: | - {{ define "__alertmanager" }}AlertManager{{ end }} - {{ define "__alertmanagerURL" }}{{ .ExternalURL }}/#/alerts?receiver={{ .Receiver }}{{ end }} - - {{ define "__subject" }}[{{ .Status | toUpper }}{{ if eq .Status "firing" }}:{{ .Alerts.Firing | len }}{{ end }}] {{ .GroupLabels.SortedPairs.Values | join " " }} {{ if gt (len .CommonLabels) (len .GroupLabels) }}({{ with .CommonLabels.Remove .GroupLabels.Names }}{{ .Values | join " " }}{{ end }}){{ end }}{{ end }} - {{ define "__description" }}{{ end }} - - {{ define "__text_alert_list" }}{{ range . }}Labels: - {{ range .Labels.SortedPairs }} - {{ .Name }} = {{ .Value }} - {{ end }}Annotations: - {{ range .Annotations.SortedPairs }} - {{ .Name }} = {{ .Value }} - {{ end }}Source: {{ .GeneratorURL }} - {{ end }}{{ end }} - - - {{ define "slack.default.title" }}{{ template "__subject" . }}{{ end }} - {{ define "slack.default.username" }}{{ template "__alertmanager" . }}{{ end }} - {{ define "slack.default.fallback" }}{{ template "slack.default.title" . 
}} | {{ template "slack.default.titlelink" . }}{{ end }} - {{ define "slack.default.pretext" }}{{ end }} - {{ define "slack.default.titlelink" }}{{ template "__alertmanagerURL" . }}{{ end }} - {{ define "slack.default.iconemoji" }}{{ end }} - {{ define "slack.default.iconurl" }}{{ end }} - {{ define "slack.default.text" }}{{ end }} - - - {{ define "hipchat.default.from" }}{{ template "__alertmanager" . }}{{ end }} - {{ define "hipchat.default.message" }}{{ template "__subject" . }}{{ end }} - - - {{ define "pagerduty.default.description" }}{{ template "__subject" . }}{{ end }} - {{ define "pagerduty.default.client" }}{{ template "__alertmanager" . }}{{ end }} - {{ define "pagerduty.default.clientURL" }}{{ template "__alertmanagerURL" . }}{{ end }} - {{ define "pagerduty.default.instances" }}{{ template "__text_alert_list" . }}{{ end }} - - - {{ define "opsgenie.default.message" }}{{ template "__subject" . }}{{ end }} - {{ define "opsgenie.default.description" }}{{ .CommonAnnotations.SortedPairs.Values | join " " }} - {{ if gt (len .Alerts.Firing) 0 -}} - Alerts Firing: - {{ template "__text_alert_list" .Alerts.Firing }} - {{- end }} - {{ if gt (len .Alerts.Resolved) 0 -}} - Alerts Resolved: - {{ template "__text_alert_list" .Alerts.Resolved }} - {{- end }} - {{- end }} - {{ define "opsgenie.default.source" }}{{ template "__alertmanagerURL" . }}{{ end }} - - - {{ define "victorops.default.message" }}{{ template "__subject" . }} | {{ template "__alertmanagerURL" . }}{{ end }} - {{ define "victorops.default.from" }}{{ template "__alertmanager" . }}{{ end }} - - - {{ define "email.default.subject" }}{{ template "__subject" . }}{{ end }} - {{ define "email.default.html" }} - - - - - - - {{ template "__subject" . }} - - - - - - - - - - - -
-
- - - - - - - -
- {{ .Alerts | len }} alert{{ if gt (len .Alerts) 1 }}s{{ end }} for {{ range .GroupLabels.SortedPairs }} - {{ .Name }}={{ .Value }} - {{ end }} -
- - - - - {{ if gt (len .Alerts.Firing) 0 }} - - - - {{ end }} - {{ range .Alerts.Firing }} - - - - {{ end }} - - {{ if gt (len .Alerts.Resolved) 0 }} - {{ if gt (len .Alerts.Firing) 0 }} - - - - {{ end }} - - - - {{ end }} - {{ range .Alerts.Resolved }} - - - - {{ end }} -
- View in {{ template "__alertmanager" . }} -
- [{{ .Alerts.Firing | len }}] Firing -
- Labels
- {{ range .Labels.SortedPairs }}{{ .Name }} = {{ .Value }}
{{ end }} - {{ if gt (len .Annotations) 0 }}Annotations
{{ end }} - {{ range .Annotations.SortedPairs }}{{ .Name }} = {{ .Value }}
{{ end }} - Source
-
-
-
-
-
- [{{ .Alerts.Resolved | len }}] Resolved -
- Labels
- {{ range .Labels.SortedPairs }}{{ .Name }} = {{ .Value }}
{{ end }} - {{ if gt (len .Annotations) 0 }}Annotations
{{ end }} - {{ range .Annotations.SortedPairs }}{{ .Name }} = {{ .Value }}
{{ end }} - Source
-
-
- -
-
- - - - - {{ end }} - - {{ define "pushover.default.title" }}{{ template "__subject" . }}{{ end }} - {{ define "pushover.default.message" }}{{ .CommonAnnotations.SortedPairs.Values | join " " }} - {{ if gt (len .Alerts.Firing) 0 }} - Alerts Firing: - {{ template "__text_alert_list" .Alerts.Firing }} - {{ end }} - {{ if gt (len .Alerts.Resolved) 0 }} - Alerts Resolved: - {{ template "__text_alert_list" .Alerts.Resolved }} - {{ end }} - {{ end }} - {{ define "pushover.default.url" }}{{ template "__alertmanagerURL" . }}{{ end }} - slack.tmpl: | - {{ define "slack.devops.text" }} - {{range .Alerts}}{{.Annotations.DESCRIPTION}} - {{end}} - {{ end }} -kind: ConfigMap -metadata: - creationTimestamp: null - name: alertmanager-templates - namespace: monitoring ---- -kind: ConfigMap -apiVersion: v1 -metadata: - name: alertmanager - namespace: monitoring -data: - config.yml: |- - global: - # ResolveTimeout is the time after which an alert is declared resolved - # if it has not been updated. - resolve_timeout: 5m - - # The smarthost and SMTP sender used for mail notifications. - smtp_smarthost: 'smtp.gmail.com:587' - smtp_from: 'foo@bar.com' - smtp_auth_username: 'foo@bar.com' - smtp_auth_password: 'barfoo' - - # The API URL to use for Slack notifications. - slack_api_url: 'https://hooks.slack.com/services/insert-secret-token-here/ - - # # The directory from which notification templates are read. - templates: - - '/etc/alertmanager-templates/*.tmpl' - - # The root route on which each incoming alert enters. - route: - - # The labels by which incoming alerts are grouped together. For example, - # multiple alerts coming in for cluster=A and alertname=LatencyHigh would - # be batched into a single group. - - group_by: ['alertname', 'cluster', 'service'] - - # When a new group of alerts is created by an incoming alert, wait at - # least 'group_wait' to send the initial notification. 
- # This way ensures that you get multiple alerts for the same group that start - # firing shortly after another are batched together on the first - # notification. - - group_wait: 30s - - # When the first notification was sent, wait 'group_interval' to send a batch - # of new alerts that started firing for that group. - - group_interval: 5m - - # If an alert has successfully been sent, wait 'repeat_interval' to - # resend them. - - #repeat_interval: 1m - repeat_interval: 15m - - # A default receiver - - # If an alert isn't caught by a route, send it to default. - receiver: default - - # All the above attributes are inherited by all child routes and can - # overwritten on each. - - # The child route trees. - routes: - # Send severity=slack alerts to slack. - - match: - severity: slack - receiver: slack_alert - # - match: - # severity: email - # receiver: email_alert - - receivers: - - name: 'default' - slack_configs: - - channel: '#alertmanager-test' - text: '{{ template "slack.devops.text" . 
}}' - send_resolved: true - - - name: 'slack_alert' - slack_configs: - - channel: '#alertmanager-test' - send_resolved: true ---- -apiVersion: extensions/v1beta1 -kind: Deployment -metadata: - name: alertmanager - namespace: monitoring -spec: - replicas: 1 - selector: - matchLabels: - app: alertmanager - template: - metadata: - name: alertmanager - labels: - app: alertmanager - triggerUpdate: "3" - spec: - containers: - - name: watch - image: weaveworks/watch:master-5b2a6e5 - imagePullPolicy: IfNotPresent - args: ["-v", "-t", "-p=/etc/alertmanager", "curl", "-X", "POST", "--fail", "-o", "-", "-sS", "http://alertmanager:9093/-/reload"] - volumeMounts: - - name: config-volume - mountPath: /etc/alertmanager - - name: alertmanager - image: quay.io/prometheus/alertmanager:v0.7.1 - args: - - '-config.file=/etc/alertmanager/config.yml' - - '-storage.path=/alertmanager' - ports: - - name: alertmanager - containerPort: 9093 - volumeMounts: - - name: config-volume - mountPath: /etc/alertmanager - - name: templates-volume - mountPath: /etc/alertmanager-templates - - name: alertmanager - mountPath: /alertmanager - volumes: - - name: config-volume - configMap: - name: alertmanager - - name: templates-volume - configMap: - name: alertmanager-templates - - name: alertmanager - emptyDir: {} ---- -apiVersion: v1 -kind: Service -metadata: - annotations: - prometheus.io/scrape: 'true' - prometheus.io/path: '/metrics' - labels: - name: alertmanager - name: alertmanager - namespace: monitoring -spec: - selector: - app: alertmanager - type: NodePort - ports: - - name: alertmanager - protocol: TCP - port: 9093 - targetPort: 9093 ---- apiVersion: extensions/v1beta1 kind: Deployment metadata: @@ -2533,7 +2174,6 @@ spec: - '-storage.local.retention=12h' - '-storage.local.memory-chunks=500000' - '-config.file=/etc/prometheus/prometheus.yaml' - - '-alertmanager.url=http://alertmanager:9093/' ports: - name: webui containerPort: 9090 From 4c6bcca9ed836e5bc983e698a9cef23d399a94df Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Tue, 11 Sep 2018 15:09:11 +0200 Subject: [PATCH 73/78] Simple build and upload script, if the cron-job is running it will pick up the newest image the next time it runs. It's magic. --- .../script/build-and-upload-docker-image.sh | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100755 bq-metrics-extractor/script/build-and-upload-docker-image.sh diff --git a/bq-metrics-extractor/script/build-and-upload-docker-image.sh b/bq-metrics-extractor/script/build-and-upload-docker-image.sh new file mode 100755 index 000000000..1413ac5a3 --- /dev/null +++ b/bq-metrics-extractor/script/build-and-upload-docker-image.sh @@ -0,0 +1,49 @@ +#!/bin/sh + +## +## Build a new jar file, then a new docker image, then +## upload the docker image to a google docker +## repository. +## + + +# Exit on failure +set -e + +# Check for dependencies +DEPENDENCIES="gradle docker gcloud" +for dep in $DEPENDENCIES ; do + if [[ -z "$(type $dep)" ]] ; then + echo "Could not find dependency $dep, bailing out" + exit 1 + fi +done + +# Set destination + +GCLOUD_PROJECT_NAME="pantel-2decb" +CONTAINER_NAME="bq-metrics-extractor" +GCLOUD_REPO_NAME="eu.gcr.io" + + + +# Log into the appropriate google account and prepare to build&upload +# XXX Couldn't figure out how to make this work well in a script, but +# that should be solved, therefore I'm keeping the dead code instead +# of doing the right thing according to the project coding standard +# and killing it off. +# gcloud auth login +# gcloud auth configure-docker + +# Build the java .jar application from sources +gradle build + +# Build the docker container +CONTAINER_ID=$(docker build . 
| grep "Successfully built" | awk '{print $3}') +echo "Built container $CONTAINER_ID" + +# Tag and push the docker container to the google repo +echo "Tagging and pushing container" +THE_TAG="${GCLOUD_REPO_NAME}/${GCLOUD_PROJECT_NAME}/${CONTAINER_NAME}" +docker tag $CONTAINER_ID ${THE_TAG} +docker push ${THE_TAG} From 930a69e15aff3606af4132611dae77c594e022f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Tue, 11 Sep 2018 15:09:40 +0200 Subject: [PATCH 74/78] Adding curleys for consistency --- bq-metrics-extractor/script/build-and-upload-docker-image.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bq-metrics-extractor/script/build-and-upload-docker-image.sh b/bq-metrics-extractor/script/build-and-upload-docker-image.sh index 1413ac5a3..f7844af69 100755 --- a/bq-metrics-extractor/script/build-and-upload-docker-image.sh +++ b/bq-metrics-extractor/script/build-and-upload-docker-image.sh @@ -45,5 +45,5 @@ echo "Built container $CONTAINER_ID" # Tag and push the docker container to the google repo echo "Tagging and pushing container" THE_TAG="${GCLOUD_REPO_NAME}/${GCLOUD_PROJECT_NAME}/${CONTAINER_NAME}" -docker tag $CONTAINER_ID ${THE_TAG} +docker tag ${CONTAINER_ID} ${THE_TAG} docker push ${THE_TAG} From b78af4b40fd6ddbf5a18bdefd5511e66b116c5a0 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Tue, 11 Sep 2018 20:07:00 +0200 Subject: [PATCH 75/78] Add Gauge metric type --- bq-metrics-extractor/config/config.yaml | 5 +- .../BqMetricsExtractorApplication.kt | 55 ++++++++++++++----- 2 files changed, 44 insertions(+), 16 deletions(-) diff --git a/bq-metrics-extractor/config/config.yaml b/bq-metrics-extractor/config/config.yaml index d167492ff..d7f6947d7 100644 --- a/bq-metrics-extractor/config/config.yaml +++ b/bq-metrics-extractor/config/config.yaml @@ -17,20 +17,21 @@ bqmetrics: sql: > SELECT count(distinct user_pseudo_id) AS count FROM `pantel-2decb.analytics_160712959.events_*` WHERE event_name = "first_open" - - type: 
summary + - type: gauge name: sims_who_have_used_data help: Number of SIMs that has used data last 24 hours resultColumn: count sql: > SELECT count(DISTINCT msisdn) AS count FROM `pantel-2decb.data_consumption.raw_consumption` WHERE timestamp >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 DAY) - - type: summary + - type: gauge name: total_data_used help: Total data used last 24 hours resultColumn: count sql: > SELECT sum(bucketBytes) AS count FROM `pantel-2decb.data_consumption.raw_consumption` WHERE timestamp >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 DAY) + - type: gauge name: revenue_last24hours help: Revenue for last 24 hours resultColumn: revenue diff --git a/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt b/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt index 11454bc09..f43705d63 100644 --- a/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt +++ b/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt @@ -10,6 +10,7 @@ import io.prometheus.client.exporter.PushGateway import io.prometheus.client.CollectorRegistry import io.dropwizard.Configuration import io.dropwizard.cli.ConfiguredCommand +import io.prometheus.client.Gauge import io.prometheus.client.Summary import net.sourceforge.argparse4j.inf.Namespace import net.sourceforge.argparse4j.inf.Subparser @@ -141,18 +142,8 @@ private class BqMetricsExtractorApplication : Application { + metricSources.add(GaugeMetricBuilder( + it.name, + it.help, + it.sql, + it.resultColumn)) + } else -> { log.error("Unknown metrics type '${it.type}'") } From 68360da6a20ebaccd3fadfa4e0bbf357327ced13 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Wed, 12 Sep 2018 11:21:20 +0200 Subject: [PATCH 76/78] Add paid users count for last 24 hopurs --- bq-metrics-extractor/config/config.yaml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git 
a/bq-metrics-extractor/config/config.yaml b/bq-metrics-extractor/config/config.yaml index d7f6947d7..69a3b60d1 100644 --- a/bq-metrics-extractor/config/config.yaml +++ b/bq-metrics-extractor/config/config.yaml @@ -38,4 +38,10 @@ bqmetrics: sql: > SELECT SUM(product.price.amount) as revenue FROM `pantel-2decb.purchases.raw_purchases` WHERE TIMESTAMP_MILLIS(timestamp) > TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 24 HOUR) - AND TIMESTAMP_MILLIS(timestamp) < CURRENT_TIMESTAMP() + - type: gauge + name: total_paid_users + help: Number of users who have purchased in last 24 hours + resultColumn: count + sql: > + SELECT COUNT(DISTINCT subscriberId) as count FROM `pantel-2decb.purchases.raw_purchases` + WHERE TIMESTAMP_MILLIS(timestamp) > TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 24 HOUR) From 41def6d59d58a6e108ca82684e793eda62c96a6a Mon Sep 17 00:00:00 2001 From: "Kjell M. Myksvoll" Date: Wed, 12 Sep 2018 14:18:51 +0200 Subject: [PATCH 77/78] Adds more details about each souce/account/ to the 'list payment sources' client API --- acceptance-tests/script/wait.sh | 0 .../kotlin/org/ostelco/at/jersey/Tests.kt | 6 +++ .../kotlin/org/ostelco/at/okhttp/Tests.kt | 6 +++ .../StripePaymentProcessor.kt | 40 +++++++++++++++---- .../prime/paymentprocessor/core/Model.kt | 2 +- prime/infra/dev/prime-client-api.yaml | 35 ++++++++++++++++ 6 files changed, 81 insertions(+), 8 deletions(-) mode change 100755 => 100644 acceptance-tests/script/wait.sh diff --git a/acceptance-tests/script/wait.sh b/acceptance-tests/script/wait.sh old mode 100755 new mode 100644 diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt index 3d0f96c4a..12b6a676c 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt @@ -296,6 +296,12 @@ class SourceTest { assert(sources.isNotEmpty()) { "Expected at least one payment source for profile 
$email" } assert(sources.map{ it.id }.containsAll(listOf(cardId, newCardId))) { "Expected to find both $cardId and $newCardId in list of sources for profile $email" } + + sources.forEach { + assert(it.details.id.isNotEmpty()) { "Expected 'id' to be set in source account details for profile $email" } + assertEquals("card", it.details.accountType, + "Unexpected source account type ${it.details.accountType} for profile $email") + } } @Test diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt index dcf3b45b6..7a9e99ed3 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt @@ -255,6 +255,12 @@ class SourceTest { assert(sources.isNotEmpty()) { "Expected at least one payment source for profile $email" } assert(sources.map{ it.id }.containsAll(listOf(cardId, newCardId))) { "Expected to find both $cardId and $newCardId in list of sources for profile $email" } + + sources.forEach { + assert(it.details.id.isNotEmpty()) { "Expected 'id' to be set in source account details for profile $email" } + assertEquals("card", it.details.accountType, + "Unexpected source account type ${it.details.accountType} for profile $email") + } } @Test diff --git a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt index 60fbc9c31..ca34b6aef 100644 --- a/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt +++ b/payment-processor/src/main/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessor.kt @@ -2,13 +2,8 @@ package org.ostelco.prime.paymentprocessor import arrow.core.Either import arrow.core.flatMap -import com.stripe.model.Charge -import com.stripe.model.Customer -import com.stripe.model.Plan -import 
com.stripe.model.Product -import com.stripe.model.Subscription import org.ostelco.prime.logger -import com.stripe.model.Refund +import com.stripe.model.* import org.ostelco.prime.paymentprocessor.core.* @@ -21,11 +16,42 @@ class StripePaymentProcessor : PaymentProcessor { val sources = mutableListOf() val customer = Customer.retrieve(customerId) customer.sources.data.forEach { - sources.add(SourceInfo(it.id)) + sources.add(SourceInfo(it.id, getAccountDetails(it))) } sources } + /* Returns detailed 'account details' for the given Stripe source/account. + Note that the fields 'id' and 'accountType' are manadatory. */ + private fun getAccountDetails(accountInfo: ExternalAccount) : Map { + when (accountInfo) { + is Card -> { + return mapOf("id" to accountInfo.id, + "accountType" to "card", + "addressLine1" to accountInfo.addressLine1, + "addressLine2" to accountInfo.addressLine2, + "zip" to accountInfo.addressZip, + "city" to accountInfo.addressCity, + "state" to accountInfo.addressState, + "country" to accountInfo.country, + "currency" to accountInfo.currency, + "brand" to accountInfo.brand, // "Visa", "Mastercard" etc. + "last4" to accountInfo.last4, + "expireMonth" to accountInfo.expMonth, + "expireYear" to accountInfo.expYear, + "funding" to accountInfo.funding) // Typ.: "credit" or "debit" + .filterValues { it != null } // Unfortunately the 'swagger' def. will removed fields back again. 
+ } + // To add support for other Stripe source/account types, see + // https://stripe.com/docs/api/java#sources + else -> { + logger.error("Received unsupported Stripe source/account type: {}", accountInfo) + return mapOf("id" to accountInfo.id, + "accountType" to "unsupported") + } + } + } + override fun createPaymentProfile(userEmail: String): Either = either(ForbiddenError("Failed to create profile for user $userEmail")) { val customerParams = mapOf("email" to userEmail) diff --git a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/Model.kt b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/Model.kt index e890b127f..c87fc14cf 100644 --- a/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/Model.kt +++ b/prime-api/src/main/kotlin/org/ostelco/prime/paymentprocessor/core/Model.kt @@ -6,6 +6,6 @@ class ProductInfo(val id: String) class ProfileInfo(val id: String) -class SourceInfo(val id: String) +class SourceInfo(val id: String, val details: Map? 
= null) class SubscriptionInfo(val id: String) diff --git a/prime/infra/dev/prime-client-api.yaml b/prime/infra/dev/prime-client-api.yaml index d8eae28df..eea0a83a7 100644 --- a/prime/infra/dev/prime-client-api.yaml +++ b/prime/infra/dev/prime-client-api.yaml @@ -442,6 +442,41 @@ definitions: id: description: "The identifier for the source" type: string + details: + description: "All information stored with the source" + type: object + properties: + id: + type: string + accountType: + type: string + addressLine1: + type: string + addressLine2: + type: string + zip: + type: string + city: + type: string + state: + type: string + country: + type: string + currency: + type: string + brand: + type: string + last4: + type: string + expireMonth: + type: integer + expireYear: + type: integer + funding: + type: string + required: + - id + - accountType ConsentList: type: array items: From c5723d1c7b6ec00ae0f2dd988172ed63eb72cb3e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Wed, 12 Sep 2018 18:02:15 +0200 Subject: [PATCH 78/78] Features/reduce image size of bq extractor (#279) * Change from openjdk to alpine, reduce image size from 600 something to 40 something mb * Make a smaller, quicker java image * Copy from the test dockerfile to the prod dockerfile * Fixing typo --- bq-metrics-extractor/Dockerfile | 24 ++++++++++++++++--- bq-metrics-extractor/Dockerfile.test | 24 ++++++++++++++++--- bq-metrics-extractor/script/start.sh | 5 ++-- .../BqMetricsExtractorApplication.kt | 10 ++++++++ 4 files changed, 55 insertions(+), 8 deletions(-) diff --git a/bq-metrics-extractor/Dockerfile b/bq-metrics-extractor/Dockerfile index 3d0e959a8..3eefa70c0 100644 --- a/bq-metrics-extractor/Dockerfile +++ b/bq-metrics-extractor/Dockerfile @@ -1,10 +1,28 @@ -FROM openjdk:8u171 +FROM alpine:3.7 MAINTAINER CSI "csi@telenordigital.com" +# +# Copy the files we need +# + COPY script/start.sh /start.sh COPY config/config.yaml /config/config.yaml - COPY 
build/libs/bq-metrics-extractor-uber.jar /bq-metrics-extractor.jar -CMD ["/start.sh"] +# +# Load, then dump the standard java classes into the +# image being built, to speed up java load time +# using Class Data Sharing. The "quit" command will +# simply quit the program after it's dumped the list of +# classes that should be cached. +# + +CMD ["java", "-Dfile.encoding=UTF-8", "-Xshare:on", "-Xshare:dump", "-jar", "/bq-metrics-extractor.jar", "quit", "config/config.yaml"] + + +# +# Finally the actual entry point +# + +ENTRYPOINT ["/start.sh"] diff --git a/bq-metrics-extractor/Dockerfile.test b/bq-metrics-extractor/Dockerfile.test index 22568bb72..312aae869 100644 --- a/bq-metrics-extractor/Dockerfile.test +++ b/bq-metrics-extractor/Dockerfile.test @@ -1,11 +1,29 @@ -FROM openjdk:8u171 +FROM alpine:3.7 MAINTAINER CSI "csi@telenordigital.com" +# +# Copy the files we need +# + COPY script/start.sh /start.sh COPY config/pantel-prod.json /secret/pantel-prod.json COPY config/config.yaml /config/config.yaml - COPY build/libs/bq-metrics-extractor-uber.jar /bq-metrics-extractor.jar -CMD ["/start.sh"] +# +# Load, then dump the standard java classes into the +# image being built, to speed up java load time +# using Class Data Sharing. The "quit" command will +# simply quit the program after it's dumped the list of +# classes that should be cached. 
+# + +CMD ["java", "-Dfile.encoding=UTF-8", "-Xshare:on", "-Xshare:dump", "-jar", "/bq-metrics-extractor.jar", "quit", "config/config.yaml"] + + +# +# Finally the actual entry point +# + +ENTRYPOINT ["/start.sh"] diff --git a/bq-metrics-extractor/script/start.sh b/bq-metrics-extractor/script/start.sh index 250e6c3b3..6081dbe61 100755 --- a/bq-metrics-extractor/script/start.sh +++ b/bq-metrics-extractor/script/start.sh @@ -2,5 +2,6 @@ # Start app exec java \ - -Dfile.encoding=UTF-8 \ - -jar /bq-metrics-extractor.jar query --pushgateway pushgateway:8080 config/config.yaml + -Dfile.encoding=UTF-8 \ + -Xshare:on \ + -jar /bq-metrics-extractor.jar query --pushgateway pushgateway:8080 config/config.yaml diff --git a/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt b/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt index f43705d63..2f821ac3e 100644 --- a/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt +++ b/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt @@ -310,4 +310,14 @@ private class CollectAndPushMetrics : ConfiguredCommand( + "quit", + "Do nothing, only used to prime caches") { + override fun run(bootstrap: Bootstrap?, + namespace: Namespace?, + configuration: BqMetricsExtractorConfig?) { + // Doing nothing, as advertised. + } + } }