From c73801755bea899810c16e2ed42694092ff7f4a9 Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Sun, 30 Sep 2018 00:24:59 +0200 Subject: [PATCH 01/93] Fixed missing request logging --- .circleci/config.yml | 5 ++-- .../analytics/requestlogs/URIFilterFactory.kt | 27 +++++++++++++++++++ ...io.dropwizard.logging.filter.FilterFactory | 1 + prime/build.gradle | 2 +- prime/config/config.yaml | 7 ++--- 5 files changed, 35 insertions(+), 7 deletions(-) create mode 100644 analytics-module/src/main/kotlin/org/ostelco/prime/analytics/requestlogs/URIFilterFactory.kt create mode 100644 analytics-module/src/main/resources/META-INF/services/io.dropwizard.logging.filter.FilterFactory diff --git a/.circleci/config.yml b/.circleci/config.yml index 68374faf5..e51df5f5b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -103,9 +103,8 @@ jobs: ### JOBS FOR on-PR-merge-to-dev PIPELINE build-code: - - docker: - - image: circleci/openjdk:11-jdk-sid + machine: + enabled: true steps: - checkout diff --git a/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/requestlogs/URIFilterFactory.kt b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/requestlogs/URIFilterFactory.kt new file mode 100644 index 000000000..2938ee5fe --- /dev/null +++ b/analytics-module/src/main/kotlin/org/ostelco/prime/analytics/requestlogs/URIFilterFactory.kt @@ -0,0 +1,27 @@ +package org.ostelco.prime.analytics.requestlogs + +import ch.qos.logback.access.spi.IAccessEvent +import ch.qos.logback.core.filter.Filter +import ch.qos.logback.core.spi.FilterReply +import com.fasterxml.jackson.annotation.JsonProperty +import com.fasterxml.jackson.annotation.JsonTypeName +import io.dropwizard.logging.filter.FilterFactory +import org.hibernate.validator.constraints.NotBlank + +@JsonTypeName("URI") +class URIFilterFactory : FilterFactory { + + @NotBlank + @JsonProperty + lateinit var uri: String + + override fun build() = object : Filter() { + override fun decide(event: IAccessEvent): FilterReply { 
+ return if (event.requestURI == "/$uri") { + FilterReply.DENY + } else { + FilterReply.NEUTRAL + } + } + } +} \ No newline at end of file diff --git a/analytics-module/src/main/resources/META-INF/services/io.dropwizard.logging.filter.FilterFactory b/analytics-module/src/main/resources/META-INF/services/io.dropwizard.logging.filter.FilterFactory new file mode 100644 index 000000000..8b1e232e4 --- /dev/null +++ b/analytics-module/src/main/resources/META-INF/services/io.dropwizard.logging.filter.FilterFactory @@ -0,0 +1 @@ +org.ostelco.prime.analytics.requestlogs.URIFilterFactory \ No newline at end of file diff --git a/prime/build.gradle b/prime/build.gradle index 6e73050c9..0a7ff1a0d 100644 --- a/prime/build.gradle +++ b/prime/build.gradle @@ -18,7 +18,7 @@ sourceSets { } } -version = "1.16.0" +version = "1.17.0" repositories { maven { diff --git a/prime/config/config.yaml b/prime/config/config.yaml index f96a3fcc4..7d24daf17 100644 --- a/prime/config/config.yaml +++ b/prime/config/config.yaml @@ -40,9 +40,10 @@ server: appenders: - type: console layout: - type: json - customFieldNames: - level: severity + type: access-json + filterFactories: + - type: URI + uri: prometheus-metrics logging: level: INFO From 66e215f67d3819ae805e2190cfb8f5f2dc9a410d Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Mon, 1 Oct 2018 15:35:50 +0200 Subject: [PATCH 02/93] Added roaming information to Grafana --- prime/infra/grafana-dashboard.json | 198 +++++++++++++++++++++++++++-- 1 file changed, 189 insertions(+), 9 deletions(-) diff --git a/prime/infra/grafana-dashboard.json b/prime/infra/grafana-dashboard.json index 542c40883..aa9d6e4c0 100644 --- a/prime/infra/grafana-dashboard.json +++ b/prime/infra/grafana-dashboard.json @@ -327,7 +327,7 @@ }, { "collapse": false, - "height": 279, + "height": 274, "panels": [ { "cacheTimeout": null, @@ -715,7 +715,7 @@ { "aliasColors": {}, "bars": false, - "datasource": null, + "datasource": "prometheus", "fill": 1, "id": 18, "legend": { @@ 
-741,18 +741,25 @@ "steppedLine": false, "targets": [ { - "expr": "total_data_used_today", + "expr": "total_data_used_today_local_loltel_test", "intervalFactor": 2, "legendFormat": "", - "metric": "total_data_used_today", + "metric": "total_data_used_today_local_loltel_test", "refId": "A", "step": 60 + }, + { + "expr": "total_data_used_today_roaming_loltel_test", + "intervalFactor": 2, + "metric": "total_data_used_today_roaming_loltel_test", + "refId": "B", + "step": 60 } ], "thresholds": [], "timeFrom": null, "timeShift": null, - "title": "Data Usage today", + "title": "Data Usage Today", "tooltip": { "shared": false, "sort": 0, @@ -783,6 +790,83 @@ "show": true } ] + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": null, + "decimals": null, + "format": "decmbytes", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "id": 20, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [ + { + "expr": "total_data_used_today_roaming_loltel_test / 1000000", + "intervalFactor": 2, + "legendFormat": "", + "metric": "total_data_used_today_roaming_loltel_test", + "refId": "A", + "step": 600 + } + ], + "thresholds": "", + "title": "Total Data Used Today Roaming", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + 
"op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" } ], "repeat": null, @@ -794,7 +878,7 @@ }, { "collapse": false, - "height": 286, + "height": 242, "panels": [ { "cacheTimeout": null, @@ -1178,7 +1262,7 @@ { "aliasColors": {}, "bars": false, - "datasource": null, + "datasource": "prometheus", "fill": 1, "id": 19, "legend": { @@ -1210,12 +1294,20 @@ "metric": "total_data_used_yesterday", "refId": "A", "step": 60 + }, + { + "expr": "total_data_used_yesterday_roaming_lotlel_test", + "intervalFactor": 2, + "legendFormat": "", + "metric": "total_data_used_yesterday_roaming_lotlel_test", + "refId": "B", + "step": 60 } ], "thresholds": [], "timeFrom": null, "timeShift": null, - "title": "Data Used yesterday", + "title": "Data Used Yesterday", "tooltip": { "shared": true, "sort": 0, @@ -1246,6 +1338,83 @@ "show": true } ] + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": false, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": null, + "decimals": null, + "format": "decmbytes", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "id": 21, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "span": 2, + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "targets": [ + { + "expr": "total_data_used_yesterday_roaming_lotlel_test / 1000000", + "intervalFactor": 2, + "legendFormat": "", + "metric": 
"total_data_used_yesterday_roaming_lotlel_test", + "refId": "A", + "step": 600 + } + ], + "thresholds": "", + "title": "Total Data Used Yesterday Roaming", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" } ], "repeat": null, @@ -1254,6 +1423,17 @@ "showTitle": true, "title": "Yesterday", "titleSize": "h6" + }, + { + "collapse": false, + "height": 250, + "panels": [], + "repeat": null, + "repeatIteration": null, + "repeatRowId": null, + "showTitle": false, + "title": "Dashboard Row", + "titleSize": "h6" } ], "schemaVersion": 14, @@ -1293,5 +1473,5 @@ }, "timezone": "browser", "title": "Dashboard", - "version": 22 + "version": 27 } \ No newline at end of file From 6b2aea7a20eb92247e09e776509012be97f8af2e Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Mon, 1 Oct 2018 15:50:02 +0200 Subject: [PATCH 03/93] push analytics every 30 min Make sure that we have reported at least once the last hour --- .../org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java b/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java index 4e8524cff..ef36b15b3 100644 --- a/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java +++ b/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java @@ -42,6 +42,8 @@ class OcsgwMetrics { private ScheduledFuture keepAliveFuture = null; + private ScheduledFuture autoReportAnalyticsFuture = null; + private OcsgwAnalyticsReport lastActiveSessions = OcsgwAnalyticsReport.newBuilder().setKeepAlive(true).build(); OcsgwMetrics(String metricsServerHostname, ServiceAccountJwtAccessCredentials credentials) { @@ -110,6 +112,15 @@ private void reconnectAnalyticsReport() { TimeUnit.SECONDS); } + private void initAutoReportAnalyticsReport() { + autoReportAnalyticsFuture = 
executorService.scheduleAtFixedRate((Runnable) () -> { + sendAnalytics(lastActiveSessions); + }, + 30, + 30, + TimeUnit.MINUTES); + } + void initAnalyticsRequest() { ocsgwAnalyticsReport = ocsgwAnalyticsServiceStub.ocsgwAnalyticsEvent( new AnalyticsRequestObserver() { @@ -121,6 +132,7 @@ public void onNext(OcsgwAnalyticsReply value) { } ); initKeepAlive(); + initAutoReportAnalyticsReport(); } private void initKeepAlive() { From b09d5e33fd818385cf989a9da544086faccebe24 Mon Sep 17 00:00:00 2001 From: "Kjell M. Myksvoll" Date: Tue, 2 Oct 2018 10:18:19 +0200 Subject: [PATCH 04/93] Remove dead code --- ...ependencies_get_environment_coordinates.sh | 41 --------- sample-agent/run-export.sh | 89 ------------------- sample-agent/set-gs-names.sh | 20 ----- 3 files changed, 150 deletions(-) delete mode 100644 sample-agent/check_dependencies_get_environment_coordinates.sh delete mode 100755 sample-agent/run-export.sh delete mode 100644 sample-agent/set-gs-names.sh diff --git a/sample-agent/check_dependencies_get_environment_coordinates.sh b/sample-agent/check_dependencies_get_environment_coordinates.sh deleted file mode 100644 index 7e4ff7a18..000000000 --- a/sample-agent/check_dependencies_get_environment_coordinates.sh +++ /dev/null @@ -1,41 +0,0 @@ -#!/bin/bash - -## Intended to be sourced by other programs - - -# -# Check for dependencies -# - -if [[ -z "$DEPENDENCIES ]] ; then - - DEPENDENCIES="gcloud kubectl gsutil" - - for dep in $DEPENDENCIES ; do - if [[ -z $(which $dep) ]] ; then - echo "ERROR: Could not find dependency $dep" - fi - done -fi - -# -# Figure out relevant parts of the environment and check their -# sanity. 
-# - -if [[ -z "$PROJECT_ID" ]] ; then - PROJECT_ID=$(gcloud config get-value project) - - if [[ -z "$PROJECT_ID" ]] ; then - echo "ERROR: Unknown google project ID" - exit 1 - fi -fi - -if [[ -z "$EXPORTER_PODNAME" ]] ; then - EXPORTER_PODNAME=$(kubectl get pods | grep exporter- | awk '{print $1}') - if [[ -z "$EXPORTER_PODNAME" ]] ; then - echo "ERROR: Unknown exporter podname" - exit 1 - fi -fi diff --git a/sample-agent/run-export.sh b/sample-agent/run-export.sh deleted file mode 100755 index b7fb26826..000000000 --- a/sample-agent/run-export.sh +++ /dev/null @@ -1,89 +0,0 @@ -#!/bin/bash - -## -## Run an export, return the identifier for the export, put the -## files from the export in a directory denoted as the single -## command line parameter. -## - - - -# Absolute path to this script, e.g. /home/user/bin/foo.sh -SCRIPT=$(readlink -f "$0") -# Absolute path this script is in, thus /home/user/bin -SCRIPTPATH=$(dirname "$SCRIPT") -echo $SCRIPTPATH - - -# -# Get command line parameter, which should be an existing -# directory in which to store the results -# - -TARGET_DIR=$1 -if [[ -z "$TARGET_DIR" ]] ; then - echo "$0 Missing parameter" - echo "usage $0 target-dir" - exit 1 -fi - -if [[ ! -d "$TARGET_DIR" ]] ; then - echo "$0 parameter does not designate an existing directory" - echo "usage $0 target-dir" - exit 1 -fi - -$SCRIPTPATH/check_dependencies_get_environment_coordinates.sh - -# -# Run an export inside the kubernetes cluster, then parse -# the output of the script thar ran the export -# -#TEMPFILE="$(mktemp /tmp/abc-script.XXXXXX)" -TEMPFILE="tmpfile.txt" - -kubectl exec -it "${EXPORTER_PODNAME}" -- /bin/bash -c /export_data.sh > "$TEMPFILE" - -# Fail if the exec failed -retVal=$? -if [ $retVal -ne 0 ]; then - echo "ERROR: Failed to export data:" - cat $TMPFILE - rm $TMPFILE - exit 1 -fi - -# -# Parse the output of the tmpfile, getting the export ID, and -# the google filestore URLs for the output files. 
-# - - -EXPORT_ID=$(grep "Starting export job for" $TEMPFILE | awk '{print $5}' | sed 's/\r$//' ) - -PURCHASES_GS="gs://${PROJECT_ID}-dataconsumption-export/$EXPORT_ID-purchases.csv" -SUB_2_MSISSDN_MAPPING_GS="gs://${PROJECT_ID}-dataconsumption-export/$EXPORT_ID-sub2msisdn.csv" -CONSUMPTION_GS="gs://${PROJECT_ID}-dataconsumption-export/$EXPORT_ID.csv" - -# -# Then copy the CSV files to local storage (current directory) -# - -gsutil cp $PURCHASES_GS $TARGET_DIR -gsutil cp $SUB_2_MSISSDN_MAPPING_GS $TARGET_DIR -gsutil cp $CONSUMPTION_GS $TARGET_DIR - -# -# Clean up the tempfile -# - -rm "$TEMPFILE" - -# -# Finally output the ID of the export, since that's -# what will be used by users of this script to access -# the output -# - -echo $EXPORT_ID -exit 0 diff --git a/sample-agent/set-gs-names.sh b/sample-agent/set-gs-names.sh deleted file mode 100644 index 34075a239..000000000 --- a/sample-agent/set-gs-names.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash - - -if [[ -z "$PROJECT_ID" ]] ; then - echo "$0 PROJECT_ID variable not set, cannot determine google filestore coordinates" - exit 1 -fi - - -if [[ -z "$EXPORT_ID" ]] ; then - echo "$0 EXPORT_ID variable not set, cannot determine google filestore coordinates" - exit 1 -fi - - -PURCHASES_GS="gs://${PROJECT_ID}-dataconsumption-export/$EXPORT_ID-purchases.csv" -SUB_2_MSISSDN_MAPPING_GS="gs://${PROJECT_ID}-dataconsumption-export/$EXPORT_ID-sub2msisdn.csv" -CONSUMPTION_GS="gs://${PROJECT_ID}-dataconsumption-export/$EXPORT_ID.csv" -RESULT_SEGMENT_PSEUDO_GS="gs://${PROJECT_ID}-dataconsumption-export/${EXPORT_ID}-resultsegment-pseudoanonymized.csv" -RESULT_SEGMENT_CLEAR_GS="gs://${PROJECT_ID}-dataconsumption-export/${EXPORT_ID}-resultsegment-cleartext.csv" From 86c855d933c98e3ead68bdd3dde455e50a839029 Mon Sep 17 00:00:00 2001 From: "Kjell M. 
Myksvoll" Date: Tue, 2 Oct 2018 10:18:58 +0200 Subject: [PATCH 05/93] Setting the executable bit on the sample agent --- sample-agent/sample-agent.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 sample-agent/sample-agent.sh diff --git a/sample-agent/sample-agent.sh b/sample-agent/sample-agent.sh old mode 100644 new mode 100755 From fcb3bca46d486ee8c66f89477d99d9c1ddab5c61 Mon Sep 17 00:00:00 2001 From: "Kjell M. Myksvoll" Date: Tue, 2 Oct 2018 11:08:28 +0200 Subject: [PATCH 06/93] More working version, upload not tested yet --- sample-agent/sample-agent.sh | 43 +++++++++++++++++++----------------- 1 file changed, 23 insertions(+), 20 deletions(-) diff --git a/sample-agent/sample-agent.sh b/sample-agent/sample-agent.sh index c2f683400..f71940abb 100755 --- a/sample-agent/sample-agent.sh +++ b/sample-agent/sample-agent.sh @@ -1,10 +1,10 @@ -#!/bin/bash +#!/bin/bash -x set -e ### ### VALIDATING AND PARSING COMMAND LINE PARAMETERS -### +### # # Get command line parameter, which should be an existing @@ -94,6 +94,7 @@ echo "$0: kubectl port-forward $PRIME_PODNAME 8080:8080" function runScriptOnExporterPod { if [[ $# -ne 2 ]] ; then echo "$0 ERROR: runScriptOnExporterPod requires exactly two parameters" + die fi local scriptname=$1 local intentDescription=$2 @@ -104,9 +105,9 @@ function runScriptOnExporterPod { # level process's lifetime, I'll do it this way. TEMPFILE="tmpfile.txt" [[ -f "$TMPFILE" ]] && rm "$TMPFILE" - + kubectl exec -it "${EXPORTER_PODNAME}" -- /bin/bash -c "$scriptname" > "$TEMPFILE" - + # Fail if the exec failed retVal=$? if [[ $retVal -ne 0 ]]; then @@ -132,9 +133,9 @@ function exportDataFromExporterPod { echo "$0 ERROR: Running the runScriptOnExporterPod failed to return the name of a resultfile." 
die fi - + local exportId="$(grep "Starting export job for" $tmpfilename | awk '{print $5}' | sed 's/\r$//' )" - + if [[ -z "$exportId" ]] ; then echo "$0 Could not get export batch from exporter pod" fi @@ -143,14 +144,18 @@ function exportDataFromExporterPod { } function mapPseudosToUserids { - local tmpfile="$(runScriptOnExporterPod /map_subscribers.sh "mapping pseudoids to subscriber ids")" - [[ -f "$tmpfile" ]] && rm "$tmpfile" + # XXX TODO: Test correct number of parameters + local exportid=$1 + local tmpfile="$(runScriptOnExporterPod "/map_subscribers.sh $exportid" "mapping pseudoids to subscriber ids")" + ## [[ -f "$tmpfile" ]] && rm "$tmpfile" + echo "LOG FROM MAPPING IS:" + cat $tmpfile } # # Generate the Google filesystem names of components associated with # a particular export ID: Typical usage -# +# # PURCHASES_GS="$(gsExportCsvFilename "ab234245cvsr" "purchases")" function gsExportCsvFilename { @@ -158,7 +163,7 @@ function gsExportCsvFilename { echo "$0 ERROR: gsExportCsvFilename requires exactly two parameters, got '$@'" die fi - + local exportId=$1 local componentName=$2 if [[ -z "$exportId" ]] ; then @@ -168,13 +173,13 @@ function gsExportCsvFilename { if [[ -n "$componentName" ]] ; then componentName="-$componentName" fi - + echo "gs://${PROJECT_ID}-dataconsumption-export/${exportId}${componentName}.csv" } # -# Generate a filename +# Generate a filename # function importedCsvFilename { if [[ $# -ne 3 ]] ; then @@ -199,14 +204,14 @@ function importedCsvFilename { if [[ -n "$componentName" ]] ; then componentName="-$componentName" fi - + echo "${importDirectory}/${exportId}${componentName}.csv" } ### ### MAIN SCRIPT -### +### @@ -226,12 +231,12 @@ for component in "purchases" "sub2msisdn" "" ; do if [[ -z "$source" ]] ; then echo "$0 ERROR: Could not determine source file for export component '$component'" fi - + destination="$(importedCsvFilename "$EXPORT_ID" "$TARGET_DIR" "$component")" if [[ -z "$destination" ]] ; then echo "$0 ERROR: Could not 
determine destination file for export component '$component'" fi - + gsutil cp "$source" "$destination" done @@ -267,7 +272,7 @@ gsutil cp $SEGMENT_TMPFILE_PSEUDO $RESULT_SEGMENT_PSEUDO_GS # Then run the script that will convert it into a none-anonymized # file and fetch the results from gs:/ -mapPseudosToUserids +mapPseudosToUserids "$EXPORT_ID" gsutil cp "$RESULT_SEGMENT_CLEAR_GS" "$RESULT_SEGMENT_CLEAR" @@ -303,7 +308,7 @@ EOF # Adding the list of subscribers in clear text (indented six spaces # with a leading "-" as per YAML list syntax. -awk '{print " - " $1}' $RESULT_SEGMENT_SINGLE_COLUMN >> $IMPORTFILE_YML +awk '{print " - " $1}' $RESULT_SEGMENT_SINGLE_COLUMN >> $IMPORTFILE_YML ## ## Send it to the importer @@ -318,5 +323,3 @@ curl --data-binary @$IMPORTFILE_YML $IMPORTER_URL ## # .... eventually - - From f25faf0cdb4d6b450a426269e328e2f52cd06cbc Mon Sep 17 00:00:00 2001 From: "Kjell M. Myksvoll" Date: Tue, 2 Oct 2018 11:15:44 +0200 Subject: [PATCH 07/93] Set content type when uploading --- sample-agent/sample-agent.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sample-agent/sample-agent.sh b/sample-agent/sample-agent.sh index f71940abb..e777ff50a 100755 --- a/sample-agent/sample-agent.sh +++ b/sample-agent/sample-agent.sh @@ -315,7 +315,7 @@ awk '{print " - " $1}' $RESULT_SEGMENT_SINGLE_COLUMN >> $IMPORTFILE_YML ## (assuming the kubectl port forwarding is enabled) IMPORTER_URL=http://127.0.0.1:8080/importer -curl --data-binary @$IMPORTFILE_YML $IMPORTER_URL +curl -H "Content-type: text/vnd.yaml" --data-binary @$IMPORTFILE_YML $IMPORTER_URL ## From 2513092b26242ceddd3cc4015b1d6511d61a1ebd Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Tue, 2 Oct 2018 11:35:01 +0200 Subject: [PATCH 08/93] Create user if not found when listing sources --- .../kotlin/org/ostelco/at/jersey/Tests.kt | 20 +++++++++++++++++++ .../client/api/store/SubscriberDAOImpl.kt | 8 +++++++- 2 files changed, 27 insertions(+), 1 deletion(-) diff --git 
a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt index 86cbc9f0b..2bea6c046 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt @@ -308,6 +308,26 @@ class SourceTest { } } + @Test + fun `jersey test - GET list sources no profile `() { + + val email = "purchase-${randomInt()}@test.com" + + try { + + val sources: PaymentSourceList = get { + path = "/paymentSources" + subscriberId = email + } + + assert(sources.isEmpty()) { "Expected no payment source for profile $email" } + + } finally { + StripePayment.deleteCustomer(email = email) + } + } + + @Test fun `jersey test - PUT source set default`() { diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt index a57c167b4..900740f8f 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt @@ -338,7 +338,13 @@ class SubscriberDAOImpl(private val storage: ClientDataSource, private val ocsSu override fun listSources(subscriberId: String): Either> { return paymentProcessor.getPaymentProfile(subscriberId) - .mapLeft { error -> mapPaymentErrorToApiError(error.description, ApiErrorCode.FAILED_TO_FETCH_PAYMENT_SOURCES_LIST, error) } + .fold( + { + paymentProcessor.createPaymentProfile(subscriberId) + .mapLeft { error -> mapPaymentErrorToApiError(error.description, ApiErrorCode.FAILED_TO_FETCH_PAYMENT_SOURCES_LIST, error) } + }, + { profileInfo -> Either.right(profileInfo) } + ) .flatMap { profileInfo -> paymentProcessor.getSavedSources(profileInfo.id) .mapLeft { mapPaymentErrorToApiError("Failed to list sources", ApiErrorCode.FAILED_TO_FETCH_PAYMENT_SOURCES_LIST, it) } From 
9f8ae8c8cbac91897fc7f869904140a0d5005700 Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Tue, 2 Oct 2018 11:45:44 +0200 Subject: [PATCH 09/93] Added okHttp test --- .../kotlin/org/ostelco/at/jersey/Tests.kt | 4 ++-- .../kotlin/org/ostelco/at/okhttp/Tests.kt | 19 +++++++++++++++++++ 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt index 2bea6c046..bea718378 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt @@ -314,7 +314,7 @@ class SourceTest { val email = "purchase-${randomInt()}@test.com" try { - + val sources: PaymentSourceList = get { path = "/paymentSources" subscriberId = email @@ -378,7 +378,7 @@ class SourceTest { } @Test - fun `okhttp test - DELETE source`() { + fun `jersey test - DELETE source`() { val email = "purchase-${randomInt()}@test.com" diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt index 1a33776aa..9d4835cef 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt @@ -268,6 +268,25 @@ class SourceTest { } } + @Test + fun `okhttp test - GET list sources no profile`() { + + val email = "purchase-${randomInt()}@test.com" + try { + + val client = clientForSubject(subject = email) + + Thread.sleep(200) + + val sources = client.listSources() + + assert(sources.isEmpty()) { "Expected no payment source for profile $email" } + + } finally { + StripePayment.deleteCustomer(email = email) + } + } + @Test fun `okhttp test - PUT source set default`() { From 828cdab455c2c01fcc8b102a93aa24cf9fda79a4 Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Tue, 2 Oct 2018 12:09:17 +0200 Subject: [PATCH 10/93] Check if user was created --- 
acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt | 2 ++ acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt | 2 ++ 2 files changed, 4 insertions(+) diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt index bea718378..c3a5e67fc 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/jersey/Tests.kt @@ -322,6 +322,8 @@ class SourceTest { assert(sources.isEmpty()) { "Expected no payment source for profile $email" } + assertNotNull(StripePayment.getCustomerIdForEmail(email)) { "Customer Id should have been created" } + } finally { StripePayment.deleteCustomer(email = email) } diff --git a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt index 9d4835cef..d11931d30 100644 --- a/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt +++ b/acceptance-tests/src/main/kotlin/org/ostelco/at/okhttp/Tests.kt @@ -282,6 +282,8 @@ class SourceTest { assert(sources.isEmpty()) { "Expected no payment source for profile $email" } + assertNotNull(StripePayment.getCustomerIdForEmail(email)) { "Customer Id should have been created" } + } finally { StripePayment.deleteCustomer(email = email) } From 7603e10f912076c22a0ca2599df905ef937f68ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Tue, 2 Oct 2018 13:11:42 +0200 Subject: [PATCH 11/93] Update mapping script to use a a tst on input prameters, log more clearly, and to have more comments --- exporter/script/idle.sh | 4 +- exporter/script/map_subscribers.sh | 70 ++++++++++++++++++++++++++---- sample-agent/sample-agent.sh | 2 +- 3 files changed, 64 insertions(+), 12 deletions(-) diff --git a/exporter/script/idle.sh b/exporter/script/idle.sh index 93c81b854..ed2e64763 100644 --- a/exporter/script/idle.sh +++ b/exporter/script/idle.sh @@ -11,8 
+11,8 @@ cleanup () trap cleanup SIGINT SIGTERM -while [ 1 ] +while [[ 1 ]] do sleep 60 & wait $! -done \ No newline at end of file +done diff --git a/exporter/script/map_subscribers.sh b/exporter/script/map_subscribers.sh index 98400bf9f..965c14105 100644 --- a/exporter/script/map_subscribers.sh +++ b/exporter/script/map_subscribers.sh @@ -1,11 +1,51 @@ #!/bin/bash -#set -x + + +## +## Map a list of pseudo-anonymized subscriber IDs into clear text +## subscriber identifiers. +## +## Takes a single parameter, the exportID, so usage is: +## +## ./map_subscribers.sh 8972789sd897987rwefsa9879 +## +## Based on the command line parameter, an input file is imported from +## the file storage. The input is a file named +## +## gs://$projectId-dataconsumption-export/${exportId}-resultsegment-pseudoanonymized.csv$exportId/ +## +## This input file contains a single column, containing pseudoanonymized +## subscriber identifiers. +## +## The script proeduces a single output in the file: +## +## gs://$projectId-dataconsumption-export/${exportId}-resultsegment-cleartext.csv +## +## It contains two columns, with headers, containing pseudo IDs, and the corresponding +## clear text subscriber ID. +## +## + + +## +## Check input parameters +## + +if [[ $# -ne 1 ]] ; then + echo "$0 ERROR: Requires one command line parameter dentifying the export ID" + exit 1 +fi exportId=$1 -if [ -z "$1" ]; then - echo "To convert subscribers, specify the id of the export operation" - exit +if [[ -z "$1" ]]; then + echo "$0 ERROR: To convert subscribers, specify the id of the export operation" + exit 1 fi + +## +## Calculate locations of things to use. +## + exportId=${exportId//-} exportId=${exportId,,} projectId=pantel-2decb @@ -16,12 +56,19 @@ inputSubscriberTable=exported_pseudonyms.${exportId}_pseudo_subscriber subscriberPseudonymsTable=exported_pseudonyms.${exportId}_subscriber outputSubscriberTable=exported_pseudonyms.${exportId}_clear_subscriber +## +## Import the from the csv file. 
+## -echo "Importing data from csv $csvfile" +echo "$0: INFO Importing data from csv $csvfile" bq --location=EU load --replace --source_format=CSV $projectId:$inputSubscriberTable gs://$csvfile /subscriber-schema.json echo "Exported data to $inputSubscriberTable" -echo "Creating table $outputSubscriberTable" + +## +## Calculate the translation table +## +echo "$0: INFO Creating table $outputSubscriberTable" # SQL for joining pseudonym & hourly consumption tables. read -r -d '' sqlForJoin << EOM CREATE TEMP FUNCTION URLDECODE(url STRING) AS (( @@ -43,8 +90,13 @@ EOM # Run the query using bq & dump results to the new table bq --location=EU --format=none query --destination_table $outputSubscriberTable --replace --use_legacy_sql=false $sqlForJoin -echo "Created table $outputSubscriberTable" +echo "$0 INFO: Created table $outputSubscriberTable" + + +## +## Export data to the outut CSV file +## -echo "Exporting data to csv $outputCsvfile" +echo "$0 INFO: Exporting data to csv $outputCsvfile" bq --location=EU extract --destination_format=CSV $outputSubscriberTable gs://$outputCsvfile -echo "Exported data to gs://$outputCsvfile" +echo "$0 INFO: Exported data to gs://$outputCsvfile" diff --git a/sample-agent/sample-agent.sh b/sample-agent/sample-agent.sh index e777ff50a..d770fb2e7 100755 --- a/sample-agent/sample-agent.sh +++ b/sample-agent/sample-agent.sh @@ -144,7 +144,7 @@ function exportDataFromExporterPod { } function mapPseudosToUserids { - # XXX TODO: Test correct number of parameters + # XXX TODO: Test correct number of parameters local exportid=$1 local tmpfile="$(runScriptOnExporterPod "/map_subscribers.sh $exportid" "mapping pseudoids to subscriber ids")" ## [[ -f "$tmpfile" ]] && rm "$tmpfile" From b71a461953a496c6f356e222e2d961bd83521a61 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Tue, 2 Oct 2018 18:31:44 +0200 Subject: [PATCH 12/93] Retry BQ SQL query few times before giving up. 
Saw some logs with GoogleJsonResponseException, asking to retry the job. We are not going to catch this exception, lets see if this happens more often. --- .../ostelco/bqmetrics/BqMetricsExtractorApplication.kt | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt b/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt index 455da297f..468e1c3e5 100644 --- a/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt +++ b/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt @@ -2,6 +2,7 @@ package org.ostelco.bqmetrics import com.fasterxml.jackson.annotation.JsonProperty +import com.google.cloud.RetryOption import com.google.cloud.bigquery.BigQueryOptions import com.google.cloud.bigquery.Job import com.google.cloud.bigquery.JobId @@ -20,6 +21,7 @@ import net.sourceforge.argparse4j.inf.Namespace import net.sourceforge.argparse4j.inf.Subparser import org.slf4j.Logger import org.slf4j.LoggerFactory +import org.threeten.bp.Duration import java.util.* import javax.validation.Valid import javax.validation.constraints.NotNull @@ -163,7 +165,13 @@ private interface MetricBuilder { var queryJob: Job = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build()); // Wait for the query to complete. - queryJob = queryJob.waitFor(); + // Retry maximum 4 times for up to 2 minutes. 
+ queryJob = queryJob.waitFor( + RetryOption.initialRetryDelay(Duration.ofSeconds(10)), + RetryOption.retryDelayMultiplier(2.0), + RetryOption.maxRetryDelay(Duration.ofSeconds(20)), + RetryOption.maxAttempts(5), + RetryOption.totalTimeout(Duration.ofMinutes(2))); // Check for errors if (queryJob == null) { From 4b59d42635e5b7176d30051f73faac4a4f4bc1f7 Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Tue, 2 Oct 2018 20:03:55 +0200 Subject: [PATCH 13/93] Check that last report is set before posting --- .../java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java b/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java index ef36b15b3..91c1d05a5 100644 --- a/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java +++ b/ocsgw/src/main/java/org/ostelco/ocsgw/data/grpc/OcsgwMetrics.java @@ -44,7 +44,7 @@ class OcsgwMetrics { private ScheduledFuture autoReportAnalyticsFuture = null; - private OcsgwAnalyticsReport lastActiveSessions = OcsgwAnalyticsReport.newBuilder().setKeepAlive(true).build(); + private OcsgwAnalyticsReport lastActiveSessions = null; OcsgwMetrics(String metricsServerHostname, ServiceAccountJwtAccessCredentials credentials) { @@ -146,7 +146,9 @@ private void initKeepAlive() { } void sendAnalytics(OcsgwAnalyticsReport report) { - ocsgwAnalyticsReport.onNext(report); - lastActiveSessions = report; + if (report != null) { + ocsgwAnalyticsReport.onNext(report); + lastActiveSessions = report; + } } } \ No newline at end of file From 12bfb4685d236a5a3096d3752be07fcc2275bb1a Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Wed, 3 Oct 2018 13:20:59 +0200 Subject: [PATCH 14/93] Added active sessions to BQ extractor This will push the number of SIMs that has had an active data session. 
This means any SIM that has tried to access the network bot these that got an active session and those that did not have enough credit to get online. --- bq-metrics-extractor/config/config.yaml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/bq-metrics-extractor/config/config.yaml b/bq-metrics-extractor/config/config.yaml index 4434c9273..f780a9dd7 100644 --- a/bq-metrics-extractor/config/config.yaml +++ b/bq-metrics-extractor/config/config.yaml @@ -74,6 +74,21 @@ bqmetrics: WHERE timestamp >= TIMESTAMP_SUB(TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY), INTERVAL 1 DAY) AND timestamp < TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) + - type: gauge + name: sims_who_have_been_active_today + help: Number of SIMs that has had an active data session today + resultColumn: count + sql: > + SELECT COUNT (DISTINCT user.msisdn) FROM `pantel-2decb.ocs_gateway.raw_activeusers`, UNNEST(users) as user + WHERE timestamp >= TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) + - type: gauge + name: sims_who_was_active_yesterday + help: Number of SIMs that has had an active data session yesterday + resultColumn: count + sql: > + SELECT COUNT (DISTINCT user.msisdn) FROM `pantel-2decb.ocs_gateway.raw_activeusers`, UNNEST(users) as user + WHERE timestamp >= TIMESTAMP_SUB(TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY), INTERVAL 1 DAY) + AND timestamp < TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) - type: gauge name: total_data_used_today help: Total data used today From 13fc9722609f6dd5e436d5f2995fbb4266f9dd73 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Wed, 3 Oct 2018 14:15:58 +0200 Subject: [PATCH 15/93] Add support for ENV variables in SQL config. 
--- bq-metrics-extractor/build.gradle | 22 ++--- bq-metrics-extractor/config/config.yaml | 36 +++---- .../cronjob/deploy-dev-direct.sh | 29 ++++++ bq-metrics-extractor/cronjob/deploy-direct.sh | 29 ++++++ .../cronjob/extractor-dev.yaml | 20 ++++ bq-metrics-extractor/cronjob/extractor.yaml | 5 +- .../BqMetricsExtractorApplication.kt | 69 +++++++++---- .../ostelco/bqmetrics/MetricBuildersTest.kt | 98 +++++++++++++++++++ .../src/main/resources/table_schema.ddl | 24 +++++ prime/infra/raw_purchases_schema.ddl | 24 +++++ 10 files changed, 305 insertions(+), 51 deletions(-) create mode 100755 bq-metrics-extractor/cronjob/deploy-dev-direct.sh create mode 100755 bq-metrics-extractor/cronjob/deploy-direct.sh create mode 100644 bq-metrics-extractor/cronjob/extractor-dev.yaml rename bq-metrics-extractor/src/main/{java => kotlin}/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt (84%) create mode 100644 bq-metrics-extractor/src/test/kotlin/org/ostelco/bqmetrics/MetricBuildersTest.kt diff --git a/bq-metrics-extractor/build.gradle b/bq-metrics-extractor/build.gradle index c01398491..44fdb89aa 100644 --- a/bq-metrics-extractor/build.gradle +++ b/bq-metrics-extractor/build.gradle @@ -5,30 +5,26 @@ plugins { id "idea" } +version = "2.0.0" dependencies { implementation "io.dropwizard:dropwizard-core:$dropwizardVersion" implementation "io.dropwizard:dropwizard-client:$dropwizardVersion" - - testImplementation "io.dropwizard:dropwizard-client:$dropwizardVersion" - testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" - testImplementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" - testImplementation "org.mockito:mockito-core:$mockitoVersion" - testImplementation 'org.assertj:assertj-core:3.11.1' + implementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion" - // Bigquery dependency implementation 
"com.google.cloud:google-cloud-bigquery:$googleCloudVersion" + implementation 'io.prometheus:simpleclient_pushgateway:0.5.0' runtimeOnly "io.dropwizard:dropwizard-json-logging:$dropwizardVersion" - // Prometheus pushgateway dependencies (we might not need all of these) - // implementation 'io.prometheus:simpleclient:0.5.0' - // implementation 'io.prometheus:simpleclient_hotspot:0.5.0' - // implementation 'io.prometheus:simpleclient_httpserver:0.5.0' - implementation 'io.prometheus:simpleclient_pushgateway:0.5.0' - // implementation 'com.google.apis:google-api-services-pubsub:v1-rev401-1.25.0' + + testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" + testImplementation "org.mockito:mockito-core:$mockitoVersion" + testImplementation "org.jetbrains.kotlin:kotlin-test:$kotlinVersion" + testImplementation "org.jetbrains.kotlin:kotlin-test-junit:$kotlinVersion" + } shadowJar { diff --git a/bq-metrics-extractor/config/config.yaml b/bq-metrics-extractor/config/config.yaml index 4434c9273..24914f58f 100644 --- a/bq-metrics-extractor/config/config.yaml +++ b/bq-metrics-extractor/config/config.yaml @@ -15,14 +15,14 @@ bqmetrics: help: Number of active users resultColumn: count sql: > - SELECT count(distinct user_pseudo_id) AS count FROM `pantel-2decb.analytics_160712959.events_*` + SELECT count(distinct user_pseudo_id) AS count FROM `${DATASET_PROJECT}.analytics_160712959.events_*` WHERE event_name = "first_open" - type: gauge name: sims_who_have_used_data help: Number of SIMs that has used data last 24 hours resultColumn: count sql: > - SELECT count(DISTINCT msisdn) AS count FROM `pantel-2decb.data_consumption.raw_consumption` + SELECT count(DISTINCT msisdn) AS count FROM `${DATASET_PROJECT}.data_consumption${DATASET_MODIFIER}.raw_consumption` WHERE timestamp >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 DAY) - type: gauge name: total_data_used @@ -30,7 +30,7 @@ bqmetrics: resultColumn: count sql: > SELECT COALESCE ( - (SELECT sum(bucketBytes) AS 
count FROM `pantel-2decb.data_consumption.raw_consumption` + (SELECT sum(bucketBytes) AS count FROM `${DATASET_PROJECT}.data_consumption${DATASET_MODIFIER}.raw_consumption` WHERE timestamp >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 DAY)), 0) as count - type: gauge name: revenue_last24hours @@ -38,21 +38,21 @@ bqmetrics: resultColumn: revenue sql: > SELECT COALESCE ( - (SELECT SUM(product.price.amount) as revenue FROM `pantel-2decb.purchases.raw_purchases` + (SELECT SUM(product.price.amount) as revenue FROM `${DATASET_PROJECT}.purchases${DATASET_MODIFIER}.raw_purchases` WHERE TIMESTAMP_MILLIS(timestamp) > TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 24 HOUR)), 0) as revenue - type: gauge name: total_paid_users help: Number of users who have purchased in last 24 hours resultColumn: count sql: > - SELECT COUNT(DISTINCT subscriberId) as count FROM `pantel-2decb.purchases.raw_purchases` + SELECT COUNT(DISTINCT subscriberId) as count FROM `${DATASET_PROJECT}.purchases${DATASET_MODIFIER}.raw_purchases` WHERE TIMESTAMP_MILLIS(timestamp) > TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 24 HOUR) - type: gauge name: active_users_right_now help: Number of active users rigth now (with 1 minute interval) resultColumn: count sql: > - SELECT count(DISTINCT msisdn) AS count FROM `pantel-2decb.data_consumption.raw_consumption` + SELECT count(DISTINCT msisdn) AS count FROM `${DATASET_PROJECT}.data_consumption${DATASET_MODIFIER}.raw_consumption` WHERE timestamp >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 MINUTE) #----------------------------------------------- # Metrics values split at day boundary. 
@@ -63,14 +63,14 @@ bqmetrics: help: Number of SIMs that has used data today resultColumn: count sql: > - SELECT count(DISTINCT msisdn) AS count FROM `pantel-2decb.data_consumption.raw_consumption` + SELECT count(DISTINCT msisdn) AS count FROM `${DATASET_PROJECT}.data_consumption${DATASET_MODIFIER}.raw_consumption` WHERE timestamp >= TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) - type: gauge name: sims_who_have_used_data_yesterday help: Number of SIMs that has used data yesterday resultColumn: count sql: > - SELECT count(DISTINCT msisdn) AS count FROM `pantel-2decb.data_consumption.raw_consumption` + SELECT count(DISTINCT msisdn) AS count FROM `${DATASET_PROJECT}.data_consumption${DATASET_MODIFIER}.raw_consumption` WHERE timestamp >= TIMESTAMP_SUB(TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY), INTERVAL 1 DAY) AND timestamp < TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) @@ -80,7 +80,7 @@ bqmetrics: resultColumn: count sql: > SELECT COALESCE ( - (SELECT sum(bucketBytes) AS count FROM `pantel-2decb.data_consumption.raw_consumption` + (SELECT sum(bucketBytes) AS count FROM `${DATASET_PROJECT}.data_consumption${DATASET_MODIFIER}.raw_consumption` WHERE timestamp >= TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY)), 0) as count - type: gauge name: total_data_used_yesterday @@ -88,7 +88,7 @@ bqmetrics: resultColumn: count sql: > SELECT COALESCE ( - ( SELECT sum(bucketBytes) AS count FROM `pantel-2decb.data_consumption.raw_consumption` + ( SELECT sum(bucketBytes) AS count FROM `${DATASET_PROJECT}.data_consumption${DATASET_MODIFIER}.raw_consumption` WHERE timestamp >= TIMESTAMP_SUB(TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY), INTERVAL 1 DAY) AND timestamp < TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) ), 0) as count @@ -98,7 +98,7 @@ bqmetrics: resultColumn: count sql: > SELECT COALESCE ( - (SELECT sum(bucketBytes) AS count FROM `pantel-2decb.data_consumption.raw_consumption` + (SELECT sum(bucketBytes) AS count FROM `${DATASET_PROJECT}.data_consumption${DATASET_MODIFIER}.raw_consumption` WHERE 
timestamp >= TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) AND apn = "loltel-test" AND mccMnc = "24201"), 0) as count @@ -108,7 +108,7 @@ bqmetrics: resultColumn: count sql: > SELECT COALESCE ( - ( SELECT sum(bucketBytes) AS count FROM `pantel-2decb.data_consumption.raw_consumption` + ( SELECT sum(bucketBytes) AS count FROM `${DATASET_PROJECT}.data_consumption${DATASET_MODIFIER}.raw_consumption` WHERE timestamp >= TIMESTAMP_SUB(TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY), INTERVAL 1 DAY) AND timestamp < TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) AND apn = "loltel-test" @@ -121,7 +121,7 @@ bqmetrics: resultColumn: count sql: > SELECT COALESCE ( - (SELECT sum(bucketBytes) AS count FROM `pantel-2decb.data_consumption.raw_consumption` + (SELECT sum(bucketBytes) AS count FROM `${DATASET_PROJECT}.data_consumption${DATASET_MODIFIER}.raw_consumption` WHERE timestamp >= TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) AND apn = "loltel-test" AND mccMnc != "24201"), 0) as count @@ -131,7 +131,7 @@ bqmetrics: resultColumn: count sql: > SELECT COALESCE ( - ( SELECT sum(bucketBytes) AS count FROM `pantel-2decb.data_consumption.raw_consumption` + ( SELECT sum(bucketBytes) AS count FROM `${DATASET_PROJECT}.data_consumption${DATASET_MODIFIER}.raw_consumption` WHERE timestamp >= TIMESTAMP_SUB(TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY), INTERVAL 1 DAY) AND timestamp < TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) AND apn = "loltel-test" @@ -143,7 +143,7 @@ bqmetrics: resultColumn: revenue sql: > SELECT COALESCE ( - ( SELECT SUM(product.price.amount) as revenue FROM `pantel-2decb.purchases.raw_purchases` + ( SELECT SUM(product.price.amount) as revenue FROM `${DATASET_PROJECT}.purchases${DATASET_MODIFIER}.raw_purchases` WHERE TIMESTAMP_MILLIS(timestamp) >= TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) ), 0) as revenue - type: gauge name: revenue_yesterday @@ -151,7 +151,7 @@ bqmetrics: resultColumn: revenue sql: > SELECT COALESCE ( - ( SELECT SUM(product.price.amount) as revenue FROM 
`pantel-2decb.purchases.raw_purchases` + ( SELECT SUM(product.price.amount) as revenue FROM `${DATASET_PROJECT}.purchases${DATASET_MODIFIER}.raw_purchases` WHERE TIMESTAMP_MILLIS(timestamp) >= TIMESTAMP_SUB(TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY), INTERVAL 1 DAY) AND TIMESTAMP_MILLIS(timestamp) < TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) ), 0) as revenue @@ -160,13 +160,13 @@ bqmetrics: help: Number of users who have purchased today resultColumn: count sql: > - SELECT COUNT(DISTINCT subscriberId) as count FROM `pantel-2decb.purchases.raw_purchases` + SELECT COUNT(DISTINCT subscriberId) as count FROM `${DATASET_PROJECT}.purchases${DATASET_MODIFIER}.raw_purchases` WHERE TIMESTAMP_MILLIS(timestamp) >= TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) - type: gauge name: total_paid_users_yesterday help: Number of users who have purchased yesterday resultColumn: count sql: > - SELECT COUNT(DISTINCT subscriberId) as count FROM `pantel-2decb.purchases.raw_purchases` + SELECT COUNT(DISTINCT subscriberId) as count FROM `${DATASET_PROJECT}.purchases${DATASET_MODIFIER}.raw_purchases` WHERE TIMESTAMP_MILLIS(timestamp) >= TIMESTAMP_SUB(TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY), INTERVAL 1 DAY) AND TIMESTAMP_MILLIS(timestamp) < TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) diff --git a/bq-metrics-extractor/cronjob/deploy-dev-direct.sh b/bq-metrics-extractor/cronjob/deploy-dev-direct.sh new file mode 100755 index 000000000..4c51588cd --- /dev/null +++ b/bq-metrics-extractor/cronjob/deploy-dev-direct.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash + +set -e + +if [ ! 
-f bq-metrics-extractor/cronjob/deploy-dev-direct.sh ]; then + (>&2 echo "Run this script from project root dir (ostelco-core)") + exit 1 +fi + +kubectl config use-context $(kubectl config get-contexts --output name | grep dev-cluster) + +PROJECT_ID="$(gcloud config get-value project -q)" +EXTRACTOR_VERSION="$(gradle bq-metrics-extractor:properties -q | grep "version:" | awk '{print $2}' | tr -d '[:space:]')" +SHORT_SHA="$(git log -1 --pretty=format:%h)" +TAG="${EXTRACTOR_VERSION}-${SHORT_SHA}-dev" + +echo PROJECT_ID=${PROJECT_ID} +echo EXTRACTOR_VERSION=${EXTRACTOR_VERSION} +echo SHORT_SHA=${SHORT_SHA} +echo TAG=${TAG} + + +gradle bq-metrics-extractor:clean bq-metrics-extractor:build +docker build -t eu.gcr.io/${PROJECT_ID}/bq-metrics-extractor:${TAG} bq-metrics-extractor +docker push eu.gcr.io/${PROJECT_ID}/bq-metrics-extractor:${TAG} + +echo "Deploying bq-metrics-extractor to GKE" + +sed -e s/EXTRACTOR_VERSION/${TAG}/g bq-metrics-extractor/cronjob/extractor-dev.yaml | kubectl apply -f - diff --git a/bq-metrics-extractor/cronjob/deploy-direct.sh b/bq-metrics-extractor/cronjob/deploy-direct.sh new file mode 100755 index 000000000..4576b6d3a --- /dev/null +++ b/bq-metrics-extractor/cronjob/deploy-direct.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash + +set -e + +if [ ! 
-f bq-metrics-extractor/cronjob/deploy-direct.sh ]; then + (>&2 echo "Run this script from project root dir (ostelco-core)") + exit 1 +fi + +kubectl config use-context $(kubectl config get-contexts --output name | grep private-cluster) + +PROJECT_ID="$(gcloud config get-value project -q)" +EXTRACTOR_VERSION="$(gradle bq-metrics-extractor:properties -q | grep "version:" | awk '{print $2}' | tr -d '[:space:]')" +SHORT_SHA="$(git log -1 --pretty=format:%h)" +TAG="${EXTRACTOR_VERSION}-${SHORT_SHA}" + +echo PROJECT_ID=${PROJECT_ID} +echo EXTRACTOR_VERSION=${EXTRACTOR_VERSION} +echo SHORT_SHA=${SHORT_SHA} +echo TAG=${TAG} + + +gradle bq-metrics-extractor:clean bq-metrics-extractor:build +docker build -t eu.gcr.io/${PROJECT_ID}/bq-metrics-extractor:${TAG} bq-metrics-extractor +docker push eu.gcr.io/${PROJECT_ID}/bq-metrics-extractor:${TAG} + +echo "Deploying bq-metrics-extractor to GKE" + +sed -e s/EXTRACTOR_VERSION/${TAG}/g bq-metrics-extractor/cronjob/extractor.yaml | kubectl apply -f - diff --git a/bq-metrics-extractor/cronjob/extractor-dev.yaml b/bq-metrics-extractor/cronjob/extractor-dev.yaml new file mode 100644 index 000000000..0bd9892b7 --- /dev/null +++ b/bq-metrics-extractor/cronjob/extractor-dev.yaml @@ -0,0 +1,20 @@ +apiVersion: batch/v1beta1 +kind: CronJob +metadata: + name: bq-metrics-extractor +spec: + schedule: "*/5 * * * *" + jobTemplate: + spec: + template: + spec: + containers: + - name: bq-metrics-extractor + image: eu.gcr.io/pantel-2decb/bq-metrics-extractor:EXTRACTOR_VERSION + imagePullPolicy: Always + env: + - name: DATASET_PROJECT + value: pantel-2decb + - name: DATASET_MODIFIER + value: _dev + restartPolicy: Never diff --git a/bq-metrics-extractor/cronjob/extractor.yaml b/bq-metrics-extractor/cronjob/extractor.yaml index 65bc91486..9262a0967 100644 --- a/bq-metrics-extractor/cronjob/extractor.yaml +++ b/bq-metrics-extractor/cronjob/extractor.yaml @@ -10,6 +10,9 @@ spec: spec: containers: - name: bq-metrics-extractor - image: 
eu.gcr.io/pantel-2decb/bq-metrics-extractor:latest + image: eu.gcr.io/pantel-2decb/bq-metrics-extractor:EXTRACTOR_VERSION imagePullPolicy: Always + env: + - name: DATASET_PROJECT + value: pantel-2decb restartPolicy: Never diff --git a/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt similarity index 84% rename from bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt rename to bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt index 468e1c3e5..d71dd2b58 100644 --- a/bq-metrics-extractor/src/main/java/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt +++ b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt @@ -145,18 +145,42 @@ private class BqMetricsExtractorApplication : Application + // The variable is of the format ${VAR} + // extract variable name + val envVar = it.value.drop(2).dropLast(1) + // return the value of the environment variable + var result = env.getVar(envVar) ?: "" + // Remove all spaces and ; + result = result.replace("\\s".toRegex(), "") + result.replace(";".toRegex(), "") + } + return expandedSql.trimIndent() + } - fun getNumberValueViaSql(sql: String, resultColumn: String): Long { + fun getNumberValueViaSql(): Long { // Instantiate a client. If you don't specify credentials when constructing a client, the // client library will look for credentials in the environment, such as the // GOOGLE_APPLICATION_CREDENTIALS environment variable. 
val bigquery = BigQueryOptions.getDefaultInstance().service val queryConfig: QueryJobConfiguration = QueryJobConfiguration.newBuilder( - sql.trimIndent()) + expandSql()) .setUseLegacySql(false) .build(); @@ -191,11 +215,12 @@ private interface MetricBuilder { } } -private class SummaryMetricBuilder( - val metricName: String, - val help: String, - val sql: String, - val resultColumn: String) : MetricBuilder { +class SummaryMetricBuilder( + metricName: String, + help: String, + sql: String, + resultColumn: String, + env: EnvironmentVars) : MetricBuilder(metricName, help, sql, resultColumn, env) { private val log: Logger = LoggerFactory.getLogger(SummaryMetricBuilder::class.java) @@ -205,8 +230,9 @@ private class SummaryMetricBuilder( val summary: Summary = Summary.build() .name(metricName) .help(help).register(registry) - val value: Long = getNumberValueViaSql(sql, resultColumn) + val value: Long = getNumberValueViaSql() + log.info("Summarizing metric $metricName SQL = ${expandSql()}") log.info("Summarizing metric $metricName to be $value") summary.observe(value * 1.0) @@ -216,21 +242,23 @@ private class SummaryMetricBuilder( } } -private class GaugeMetricBuilder( - val metricName: String, - val help: String, - val sql: String, - val resultColumn: String) : MetricBuilder { +class GaugeMetricBuilder( + metricName: String, + help: String, + sql: String, + resultColumn: String, + env: EnvironmentVars) : MetricBuilder(metricName, help, sql, resultColumn, env) { - private val log: Logger = LoggerFactory.getLogger(SummaryMetricBuilder::class.java) + private val log: Logger = LoggerFactory.getLogger(GaugeMetricBuilder::class.java) override fun buildMetric(registry: CollectorRegistry) { try { val gauge: Gauge = Gauge.build() .name(metricName) .help(help).register(registry) - val value: Long = getNumberValueViaSql(sql, resultColumn) + val value: Long = getNumberValueViaSql() + log.info("Gauge metric $metricName = SQL = ${expandSql()}") log.info("Gauge metric $metricName = 
$value") gauge.set(value * 1.0) @@ -259,6 +287,7 @@ private class PrometheusPusher(val pushGateway: String, val job: String) { private val log: Logger = LoggerFactory.getLogger(PrometheusPusher::class.java) val registry = CollectorRegistry() + val env: EnvironmentVars = EnvironmentVars() fun publishMetrics(metrics: List) { @@ -271,14 +300,16 @@ private class PrometheusPusher(val pushGateway: String, val job: String) { it.name, it.help, it.sql, - it.resultColumn)) + it.resultColumn, + env)) } "GAUGE" -> { metricSources.add(GaugeMetricBuilder( it.name, it.help, it.sql, - it.resultColumn)) + it.resultColumn, + env)) } else -> { log.error("Unknown metrics type '${it.type}'") diff --git a/bq-metrics-extractor/src/test/kotlin/org/ostelco/bqmetrics/MetricBuildersTest.kt b/bq-metrics-extractor/src/test/kotlin/org/ostelco/bqmetrics/MetricBuildersTest.kt new file mode 100644 index 000000000..2e979f96e --- /dev/null +++ b/bq-metrics-extractor/src/test/kotlin/org/ostelco/bqmetrics/MetricBuildersTest.kt @@ -0,0 +1,98 @@ +package org.ostelco.bqmetrics + +import org.mockito.Mockito.mock +import kotlin.test.Test +import kotlin.test.assertEquals +import org.mockito.Mockito.`when` +import kotlin.test.assertNotEquals + +class MetricBuildersTest { + + @Test + fun testSQLNoVars() { + val testEnvVars = mock(EnvironmentVars::class.java) + `when`(testEnvVars.getVar("DATASET_PROJECT")).thenReturn("pantel-2decb") + `when`(testEnvVars.getVar("DATASET_MODIFIER")).thenReturn("_dev") + val sql = """ + SELECT count(distinct user_pseudo_id) AS count FROM `pantel-2decb.analytics_160712959.events_*` + WHERE event_name = "first_open" + """ + val metric: SummaryMetricBuilder = SummaryMetricBuilder( + metricName = "metric1", + help = "none", + sql = sql, + resultColumn = "result1", + env = testEnvVars + ) + assertEquals(metric.expandSql(), sql.trimIndent()) + } + @Test + fun testSQL2Vars() { + val testEnvVars = mock(EnvironmentVars::class.java) + 
`when`(testEnvVars.getVar("DATASET_PROJECT")).thenReturn("pantel-2decb") + `when`(testEnvVars.getVar("DATASET_MODIFIER")).thenReturn("_dev") + val sql = """ + SELECT count(distinct user_pseudo_id) AS count FROM `${'$'}{DATASET_PROJECT}.analytics_160712959${'$'}{DATASET_MODIFIER}.events_*` + WHERE event_name = "first_open" + """ + val sqlResult = """ + SELECT count(distinct user_pseudo_id) AS count FROM `pantel-2decb.analytics_160712959_dev.events_*` + WHERE event_name = "first_open" + """ + val metric: SummaryMetricBuilder = SummaryMetricBuilder( + metricName = "metric1", + help = "none", + sql = sql, + resultColumn = "result1", + env = testEnvVars + ) + assertEquals(metric.expandSql(), sqlResult.trimIndent()) + } + + @Test + fun testSQLUnknownVar() { + val testEnvVars = mock(EnvironmentVars::class.java) + `when`(testEnvVars.getVar("DATASET_PROJECT")).thenReturn("pantel-2decb") + `when`(testEnvVars.getVar("DATASET_MODIFIER")).thenReturn(null) + val sql = """ + SELECT count(distinct user_pseudo_id) AS count FROM `${'$'}{DATASET_PROJECT}.analytics_160712959${'$'}{DATASET_MODIFIER}.events_*` + WHERE event_name = "first_open" + """ + val sqlResult = """ + SELECT count(distinct user_pseudo_id) AS count FROM `pantel-2decb.analytics_160712959.events_*` + WHERE event_name = "first_open" + """ + val metric: SummaryMetricBuilder = SummaryMetricBuilder( + metricName = "metric1", + help = "none", + sql = sql, + resultColumn = "result1", + env = testEnvVars + ) + assertEquals(metric.expandSql(), sqlResult.trimIndent()) + } + + @Test + fun testMangleBadSQL() { + val testEnvVars = mock(EnvironmentVars::class.java) + `when`(testEnvVars.getVar("DATASET_PROJECT")).thenReturn("pantel-2decb") + `when`(testEnvVars.getVar("DATASET_MODIFIER")).thenReturn("; DELETE * from abc;") + val sql = """ + SELECT count(distinct user_pseudo_id) AS count FROM `${'$'}{DATASET_PROJECT}.analytics_160712959${'$'}{DATASET_MODIFIER}.events_*` + WHERE event_name = "first_open" + """ + val sqlResult = """ + 
SELECT count(distinct user_pseudo_id) AS count FROM `pantel-2decb.analytics_160712959; DELETE * from abc;.events_*` + WHERE event_name = "first_open" + """ + val metric: SummaryMetricBuilder = SummaryMetricBuilder( + metricName = "metric1", + help = "none", + sql = sql, + resultColumn = "result1", + env = testEnvVars + ) + println(metric.expandSql()) + assertNotEquals(metric.expandSql(), sqlResult.trimIndent()) + } +} diff --git a/dataflow-pipelines/src/main/resources/table_schema.ddl b/dataflow-pipelines/src/main/resources/table_schema.ddl index 5c13066d3..fcf0fb19e 100644 --- a/dataflow-pipelines/src/main/resources/table_schema.ddl +++ b/dataflow-pipelines/src/main/resources/table_schema.ddl @@ -20,4 +20,28 @@ CREATE TABLE IF NOT EXISTS apn STRING NOT NULL, mccMnc STRING NOT NULL ) +PARTITION BY DATE(timestamp); + +CREATE TABLE IF NOT EXISTS +`pantel-2decb.data_consumption_dev.hourly_consumption` +( + msisdn STRING NOT NULL, + bytes INT64 NOT NULL, + timestamp TIMESTAMP NOT NULL, + apn STRING NOT NULL, + mccMnc STRING NOT NULL +) +PARTITION BY DATE(timestamp); + + +CREATE TABLE IF NOT EXISTS +`pantel-2decb.data_consumption_dev.raw_consumption` +( + msisdn STRING NOT NULL, + bucketBytes INT64 NOT NULL, + bundleBytes INT64 NOT NULL, + timestamp TIMESTAMP NOT NULL, + apn STRING NOT NULL, + mccMnc STRING NOT NULL +) PARTITION BY DATE(timestamp); \ No newline at end of file diff --git a/prime/infra/raw_purchases_schema.ddl b/prime/infra/raw_purchases_schema.ddl index dabb4fd95..6172c84d7 100644 --- a/prime/infra/raw_purchases_schema.ddl +++ b/prime/infra/raw_purchases_schema.ddl @@ -21,3 +21,27 @@ > NOT NULL ) PARTITION BY DATE(_PARTITIONTIME) + + CREATE TABLE purchases_dev.raw_purchases + ( + id STRING NOT NULL, + subscriberId STRING NOT NULL, + timestamp INT64 NOT NULL, + status STRING NOT NULL, + product STRUCT< + sku STRING NOT NULL, + price STRUCT< + amount INT64 NOT NULL, + currency STRING NOT NULL + > NOT NULL, + properties ARRAY< STRUCT< + key STRING NOT NULL, 
+ value STRING NOT NULL + > >, + presentation ARRAY< STRUCT< + key STRING NOT NULL, + value STRING NOT NULL + > > + > NOT NULL +) +PARTITION BY DATE(_PARTITIONTIME) \ No newline at end of file From c168878dcf6d1e7877e65756d6ed65b43fbcc126 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Wed, 3 Oct 2018 14:27:44 +0200 Subject: [PATCH 16/93] Update readme to reflect new scripts --- bq-metrics-extractor/README.md | 20 ++++++-------------- 1 file changed, 6 insertions(+), 14 deletions(-) diff --git a/bq-metrics-extractor/README.md b/bq-metrics-extractor/README.md index ef069b2b1..66b863eb0 100644 --- a/bq-metrics-extractor/README.md +++ b/bq-metrics-extractor/README.md @@ -54,26 +54,18 @@ a credentials file that will provide access for the BigQuery library. How to build and deploy the cronjob manually === -##First get credentials (upgrade gcloud for good measure): +##Build and deploy the artifact: - gcloud components update - gcloud container clusters get-credentials dev-cluster --zone europe-west1-b --project pantel-2decb +Build and deploy to dev cluster -##Build the artefact: + bq-metrics-extractor/cronjob/deploy-dev-direct.sh - gradle build - docker build . +Build and deploy to prod cluster -##Authorize tag and push to docker registry in google cloud: + bq-metrics-extractor/cronjob/deploy-direct.sh - gcloud auth configure-docker - docker tag foobarbaz eu.gcr.io/pantel-2decb/bq-metrics-extractor - docker push eu.gcr.io/pantel-2decb/bq-metrics-extractor +## Display the cronjob status in kubernetes -... where foobarbaz is the id of the container built by docker build. 
- -## Then start the cronjob in kubernetes - kubectl apply -f cronjob/extractor.yaml kubectl describe cronjob bq-metrics-extractor ## To talk to the prometheus in the monitoring namespace & watch the users metrics evolve From 54f535e82fd543c123dab2f2435ef3ad6b401eb3 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Wed, 3 Oct 2018 14:28:16 +0200 Subject: [PATCH 17/93] Fix the interval for dev --- bq-metrics-extractor/cronjob/extractor-dev.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bq-metrics-extractor/cronjob/extractor-dev.yaml b/bq-metrics-extractor/cronjob/extractor-dev.yaml index 0bd9892b7..70755976d 100644 --- a/bq-metrics-extractor/cronjob/extractor-dev.yaml +++ b/bq-metrics-extractor/cronjob/extractor-dev.yaml @@ -3,7 +3,7 @@ kind: CronJob metadata: name: bq-metrics-extractor spec: - schedule: "*/5 * * * *" + schedule: "*/30 * * * *" jobTemplate: spec: template: From 881a1fbf943def61a3418811645e7180085e388b Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Wed, 3 Oct 2018 14:28:46 +0200 Subject: [PATCH 18/93] Remove unused log --- .../org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt | 2 -- 1 file changed, 2 deletions(-) diff --git a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt index d71dd2b58..6caefc5de 100644 --- a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt +++ b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt @@ -232,7 +232,6 @@ class SummaryMetricBuilder( .help(help).register(registry) val value: Long = getNumberValueViaSql() - log.info("Summarizing metric $metricName SQL = ${expandSql()}") log.info("Summarizing metric $metricName to be $value") summary.observe(value * 1.0) @@ -258,7 +257,6 @@ class GaugeMetricBuilder( .help(help).register(registry) val value: Long = 
getNumberValueViaSql() - log.info("Gauge metric $metricName = SQL = ${expandSql()}") log.info("Gauge metric $metricName = $value") gauge.set(value * 1.0) From 1e7c2f4bd1bf9ec0ae8743f0bfcce30a47f6eb25 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Wed, 3 Oct 2018 15:54:07 +0200 Subject: [PATCH 19/93] Adding notes and suggestions --- .../legacy/sample-offer-legacy.yaml | 77 +++++++++ .../demo-yamls/legacy/sample-offer-only.yaml | 12 ++ .../sample-offer-products-segments.yaml | 35 +++++ sample-agent/demo-yamls/notes.txt | 148 ++++++++++++++++++ sample-agent/demo-yamls/script1.yaml | 1 + 5 files changed, 273 insertions(+) create mode 100644 sample-agent/demo-yamls/legacy/sample-offer-legacy.yaml create mode 100644 sample-agent/demo-yamls/legacy/sample-offer-only.yaml create mode 100644 sample-agent/demo-yamls/legacy/sample-offer-products-segments.yaml create mode 100644 sample-agent/demo-yamls/notes.txt create mode 100644 sample-agent/demo-yamls/script1.yaml diff --git a/sample-agent/demo-yamls/legacy/sample-offer-legacy.yaml b/sample-agent/demo-yamls/legacy/sample-offer-legacy.yaml new file mode 100644 index 000000000..2a6f38ebe --- /dev/null +++ b/sample-agent/demo-yamls/legacy/sample-offer-legacy.yaml @@ -0,0 +1,77 @@ +# +# This is a sample YAML format to be used by +# agents that produce offers. The general idea +# is that an offer has a set of parameters, +# and also a set of selected subscribers that will +# get it. +# +# YAML was chosen since it's more human readable than +# e.g. json or protobuffers, while still being +# easy to produce by an agent, and relatively compact, +# in particular when gzipped. +# + +producingAgent: + name: Simple agent + version: 1.0 + +# # All of the parameters below are just copied from the firebasr +# # realtime database we used in the demo, converted to +# # camel case. All the fields should be documented +# # in this document, and we should think through if this is +# # the best set of parameters we went. 
+ +offer: + # XXX This offer does not have an ID, but if we were just + # updating the list of members of the segment, it would + # make sense to have an OfferID, or something that refers + # to a previously created offer. That id should be created + # by the importer, and used by the agent when updating + # membership. If any other parameters are going to be + # changed, it is necessary to produce a new offer. + # It may make sense to put the ID in the url when + # when we update (PUT method) changes, but then the + # ID in the yaml will be redundant. Figure out how to + # do this one way or another and just do it. + visibility: + from: "2018-02-22T12:41:49.871Z" + to: "2018-02-22T12:41:49.871Z" + presentation: + badgeLabel: "mbop" + description: "Best offer you will get today" + shortDescription: "Best offer!" + label: "3 GB" + name: "3 GB" + priceLabel: "49 NOK" + hidden: false + imageUrl: https://www.ft-associates.com/wp-content/uploads/2015/08/Best-Offer.jpg + financial: + repurchability: 1 + currencyLabel: "NOK" + price: 4900 + taxRate: 10.0 + product: + sku: 2 + # A possibly very long list of product parameters that are all + # dependent on the SKU's requirement. Details ignored here, + # that may pop up later. Deal with them then. + noOfBytes: 3000000000 + +# # We put the segment last, since it may have a long list of +# # members in it. We want that list to be last, since it contains +# # little information that humans are interested in, and we want +# # humans to start reading the file at the top. + +segment: + type: agent-specific-segment + description: "This is how this segment should be described" + members: + # The decryption key is what the de-anonymizer will use to + # make proper identifiers out of the members listed below. + # The special purpose key "none" indicatest that the member list + # is in clear text. 
+ decryptionKey: none + members: + - 4790300157 + - 4790300144 + - 4333333333 diff --git a/sample-agent/demo-yamls/legacy/sample-offer-only.yaml b/sample-agent/demo-yamls/legacy/sample-offer-only.yaml new file mode 100644 index 000000000..5e11702f8 --- /dev/null +++ b/sample-agent/demo-yamls/legacy/sample-offer-only.yaml @@ -0,0 +1,12 @@ +producingAgent: + name: Simple agent + version: 1.0 + +offer: + id: test-offer + # use existing product + products: + - 1GB_249NOK + # use existing segment + segments: + - test-segment diff --git a/sample-agent/demo-yamls/legacy/sample-offer-products-segments.yaml b/sample-agent/demo-yamls/legacy/sample-offer-products-segments.yaml new file mode 100644 index 000000000..b1a6e2809 --- /dev/null +++ b/sample-agent/demo-yamls/legacy/sample-offer-products-segments.yaml @@ -0,0 +1,35 @@ +producingAgent: + name: Simple agent + version: 1.0 + +offer: + id: test-offer + + # list of existing products + # listing products to be created in this yaml is OPTIONAL + +# products: +# - 1GB_249NOK + + # list of existing segments + # listing segments to be created in this yaml is OPTIONAL + +# segments: +# - test-segment + +# These products will be created and linked to offer - 'test-offer' +products: + - sku: 1GB_249NOK + price: + amount: 249 + currency: NOK + properties: + noOfBytes: 1_000_000_000 + presentation: + isDefault: true + offerLabel: Default Offer + priceLabel: 249 NOK + +# These segments will be created and linked to offer - 'test-offer' +segments: + - id: test-segment diff --git a/sample-agent/demo-yamls/notes.txt b/sample-agent/demo-yamls/notes.txt new file mode 100644 index 000000000..7fde16d49 --- /dev/null +++ b/sample-agent/demo-yamls/notes.txt @@ -0,0 +1,148 @@ +TAKE ONE +******* + + +====: Script 1 (initialization, may or not actually be run, but should represent the situation the later scripts operate on) + +createProducts: + - sku: 1GB_200NOK + price: + amount: 200 + currency: NOK + properties: + productClass: SIMPLE_DATA + 
noOfBytes: 1_000_000_000 + presentation: + productClass: SIMPLE_PRESENTATION + isDefault: true + offerLabel: Top Up + priceLabel: 200 NOK + + - sku: 2GB_200NOK + price: + amount: 200 + currency: NOK + properties: + noOfBytes: 2_000_000_000 + presentation: + isDefault: true + offerLabel: Top Up + priceLabel: 200 NOK + + - sku: 1GB_50NOK + price: + amount: 50 + currency: NOK + properties: + noOfBytes: 1_000_000_000 + presentation: + isDefault: true + offerLabel: Special offer + priceLabel: 50 NOK + +createSegments: + - id: onlySignedUp_Demo + - id: hasUsedLotsOfData_Demo + - id: hasUsedEvenMoreData_Demo + +createOffers: + - id: initialOffer + segments: + - onlySignedUp_Demo + products: + - 1GB_200NOK + - id: offerForBigDataUsers + segments: + - onlySignedUp_Demo + products: + - 2GB_200NOK + - id: specialOffer + segments: + - hasUsedEvenMoreData_Demo + products: + - 1GB_50NOK + +moveToSegment: + source: any + target: onlySignedUp_Demo + subscribers: + - foo@baz.com + - bar@baz.com + + +==== Script 2: Promote SH from onlySignedUp_Demo to offerForBigDataUser segment +moveToSegment: + source: onlySignedUp_Demo + target: hasUsedLotsOfData_Demo + subscribers: + - foo@baz.com + + +==== Script 3: Promote SH from onlySignedUp_Demo to offerForBigDataUser segment + +moveToSegment: + source: hasUsedLotsOfData_Demo + target: hasUsedEvenMoreData_Demo + subscribers: + - foo@baz.com + + +TAKE 2 +****** + +== script 1 (initialization, may or not actually be run, but should represent the situation the later scripts operate on) +createOffer: + id: demoOffer1 + createProducts: + - sku: 1GB_200NOK + price: + amount: 200 + currency: NOK + properties: + noOfBytes: 1_000_000_000 + presentation: + isDefault: true + offerLabel: Top Up + priceLabel: 200 NOK + createSegments: + - id: demoSegment1 + subscribers: + - foo@baz.com + - bar@baz.com + +== script 2 +createOffer: + id: demoOffer2 + createProducts: + - sku: 2GB_200NOK + price: + amount: 200 + currency: NOK + properties: + noOfBytes: 
2_000_000_000 + presentation: + isDefault: true + offerLabel: Top Up + priceLabel: 200 NOK + createSegments: + - id: demoSegment2 + + +== script 3 +createOffer: + id: demoOffer3 + createProducts: + - sku: 1GB_50NOK + price: + amount: 50 + currency: NOK + properties: + noOfBytes: 1_000_000_000 + presentation: + isDefault: true + offerLabel: Special offer + priceLabel: 50 NOK + createSegments: + - id: demoSegment3 + subscribers: + - bar@baz.com diff --git a/sample-agent/demo-yamls/script1.yaml b/sample-agent/demo-yamls/script1.yaml new file mode 100644 index 000000000..27e2e5bb9 --- /dev/null +++ b/sample-agent/demo-yamls/script1.yaml @@ -0,0 +1 @@ +createOffer: From 9dfd79868e9b64190f88c1ed2db11f3fb503128f Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Wed, 3 Oct 2018 21:49:35 +0200 Subject: [PATCH 20/93] Add documentation to the class. --- .../BqMetricsExtractorApplication.kt | 26 ++++++++++++++++++- .../ostelco/bqmetrics/MetricBuildersTest.kt | 3 +++ 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt index 6caefc5de..cb165f1bf 100644 --- a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt +++ b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt @@ -145,19 +145,34 @@ private class BqMetricsExtractorApplication : Application @@ -173,6 +188,9 @@ abstract class MetricBuilder( return expandedSql.trimIndent() } + /** + * Execute the SQL and get a single number value. + */ fun getNumberValueViaSql(): Long { // Instantiate a client. If you don't specify credentials when constructing a client, the // client library will look for credentials in the environment, such as the @@ -215,6 +233,9 @@ abstract class MetricBuilder( } } +/** + * Class for capturing value in a summary metric. 
+ */ class SummaryMetricBuilder( metricName: String, help: String, @@ -241,6 +262,9 @@ class SummaryMetricBuilder( } } +/** + * Class for capturing value in a Gauge metric. + */ class GaugeMetricBuilder( metricName: String, help: String, diff --git a/bq-metrics-extractor/src/test/kotlin/org/ostelco/bqmetrics/MetricBuildersTest.kt b/bq-metrics-extractor/src/test/kotlin/org/ostelco/bqmetrics/MetricBuildersTest.kt index 2e979f96e..21cc68cc1 100644 --- a/bq-metrics-extractor/src/test/kotlin/org/ostelco/bqmetrics/MetricBuildersTest.kt +++ b/bq-metrics-extractor/src/test/kotlin/org/ostelco/bqmetrics/MetricBuildersTest.kt @@ -6,6 +6,9 @@ import kotlin.test.assertEquals import org.mockito.Mockito.`when` import kotlin.test.assertNotEquals +/** + * Class for testing the SQL expander. + */ class MetricBuildersTest { @Test From dc7578d7697899df230a55d394cf3ffebd2fbaf3 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Wed, 3 Oct 2018 22:33:10 +0200 Subject: [PATCH 21/93] Use the correct dataset name in for exporter in dev cluster --- exporter/deploy/deploy-dev.sh | 2 +- exporter/deploy/deploy.sh | 2 +- exporter/deploy/exporter-dev.yaml | 26 ++++++++++++++++++++++++++ exporter/{ => deploy}/exporter.yaml | 3 +++ exporter/script/delete_export_data.sh | 8 +++++++- exporter/script/export_data.sh | 22 +++++++++++++++++++--- exporter/script/map_subscribers.sh | 7 ++++++- 7 files changed, 63 insertions(+), 7 deletions(-) create mode 100644 exporter/deploy/exporter-dev.yaml rename exporter/{ => deploy}/exporter.yaml (85%) diff --git a/exporter/deploy/deploy-dev.sh b/exporter/deploy/deploy-dev.sh index a1f4a5c1d..e2b4a6479 100755 --- a/exporter/deploy/deploy-dev.sh +++ b/exporter/deploy/deploy-dev.sh @@ -22,4 +22,4 @@ docker push eu.gcr.io/${PROJECT_ID}/exporter:${TAG} echo "Deploying exporter to GKE" -sed -e s/EXPORTER_VERSION/${TAG}/g exporter/exporter.yaml | kubectl apply -f - \ No newline at end of file +sed -e s/EXPORTER_VERSION/${TAG}/g exporter/deploy/exporter-dev.yaml | 
kubectl apply -f - \ No newline at end of file diff --git a/exporter/deploy/deploy.sh b/exporter/deploy/deploy.sh index 19a989c05..4f0ef926a 100755 --- a/exporter/deploy/deploy.sh +++ b/exporter/deploy/deploy.sh @@ -22,4 +22,4 @@ docker push eu.gcr.io/${PROJECT_ID}/exporter:${TAG} echo "Deploying exporter to GKE" -sed -e s/EXPORTER_VERSION/${TAG}/g exporter/exporter.yaml | kubectl apply -f - \ No newline at end of file +sed -e s/EXPORTER_VERSION/${TAG}/g exporter/deploy/exporter.yaml | kubectl apply -f - \ No newline at end of file diff --git a/exporter/deploy/exporter-dev.yaml b/exporter/deploy/exporter-dev.yaml new file mode 100644 index 000000000..aba828f6d --- /dev/null +++ b/exporter/deploy/exporter-dev.yaml @@ -0,0 +1,26 @@ +apiVersion: extensions/v1beta1 +kind: Deployment +metadata: + name: exporter + labels: + app: exporter + tier: backend +spec: + replicas: 1 + template: + metadata: + labels: + app: exporter + tier: backend + spec: + containers: + - name: exporter + image: eu.gcr.io/pantel-2decb/exporter:EXPORTER_VERSION + imagePullPolicy: Always + env: + - name: PROJECT_ID + value: pantel-2decb + - name: DATASET_MODIFIER + value: _dev + ports: + - containerPort: 8080 diff --git a/exporter/exporter.yaml b/exporter/deploy/exporter.yaml similarity index 85% rename from exporter/exporter.yaml rename to exporter/deploy/exporter.yaml index c174ac9d5..f05109d88 100644 --- a/exporter/exporter.yaml +++ b/exporter/deploy/exporter.yaml @@ -17,5 +17,8 @@ spec: - name: exporter image: eu.gcr.io/pantel-2decb/exporter:EXPORTER_VERSION imagePullPolicy: Always + env: + - name: PROJECT_ID + value: pantel-2decb ports: - containerPort: 8080 diff --git a/exporter/script/delete_export_data.sh b/exporter/script/delete_export_data.sh index 740c99fa9..51513b941 100644 --- a/exporter/script/delete_export_data.sh +++ b/exporter/script/delete_export_data.sh @@ -8,7 +8,13 @@ if [ -z "$1" ]; then fi exportId=${exportId//-} exportId=${exportId,,} -projectId=pantel-2decb + +# Set the 
projectId +if [[ -z "${PROJECT_ID}" ]]; then + projectId=pantel-2decb +else + projectId="${PROJECT_ID}" +fi msisdnPseudonymsTable=exported_pseudonyms.${exportId}_msisdn subscriberPseudonymsTable=exported_pseudonyms.${exportId}_subscriber diff --git a/exporter/script/export_data.sh b/exporter/script/export_data.sh index a63afd2e2..2af4d633c 100644 --- a/exporter/script/export_data.sh +++ b/exporter/script/export_data.sh @@ -7,19 +7,35 @@ if [ -z "$1" ]; then fi exportId=${exportId//-} exportId=${exportId,,} -projectId=pantel-2decb + +# Set the projectId +if [[ -z "${PROJECT_ID}" ]]; then + projectId=pantel-2decb +else + projectId="${PROJECT_ID}" +fi + +# Set the datasetModifier +if [[ -z "${DATASET_MODIFIER}" ]]; then + datasetModifier="" +else + datasetModifier="${DATASET_MODIFIER}" +fi msisdnPseudonymsTable=$projectId.exported_pseudonyms.${exportId}_msisdn subscriberPseudonymsTable=$projectId.exported_pseudonyms.${exportId}_subscriber sub2msisdnMappingsTable=exported_data_consumption.${exportId}_sub2msisdn -hourlyConsumptionTable=$projectId.data_consumption.hourly_consumption +hourlyConsumptionTable=$projectId.data_consumption${datasetModifier}.hourly_consumption dataConsumptionTable=exported_data_consumption.$exportId -rawPurchasesTable=$projectId.purchases.raw_purchases +rawPurchasesTable=$projectId.purchases${datasetModifier}.raw_purchases purchaseRecordsTable=exported_data_consumption.${exportId}_purchases csvfile=$projectId-dataconsumption-export/$exportId.csv purchasesCsvfile=$projectId-dataconsumption-export/$exportId-purchases.csv sub2msisdnCsvfile=$projectId-dataconsumption-export/$exportId-sub2msisdn.csv +echo $rawPurchasesTable +echo $hourlyConsumptionTable + # Generate the pseudonym tables for this export echo "Starting export job for $exportId" pseudonymHost="pseudonym-server-service.default.svc.cluster.local" diff --git a/exporter/script/map_subscribers.sh b/exporter/script/map_subscribers.sh index 98400bf9f..ac1a1a18c 100644 --- 
a/exporter/script/map_subscribers.sh +++ b/exporter/script/map_subscribers.sh @@ -8,7 +8,12 @@ if [ -z "$1" ]; then fi exportId=${exportId//-} exportId=${exportId,,} -projectId=pantel-2decb +# Set the projectId +if [[ -z "${PROJECT_ID}" ]]; then + projectId=pantel-2decb +else + projectId="${PROJECT_ID}" +fi csvfile=$projectId-dataconsumption-export/${exportId}-resultsegment-pseudoanonymized.csv outputCsvfile=$projectId-dataconsumption-export/${exportId}-resultsegment-cleartext.csv From 3473cbd12a9a44f01f4adff0d970b0d2da2e0afe Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Wed, 3 Oct 2018 22:53:51 +0200 Subject: [PATCH 22/93] Use variable names for project and the dataset --- bq-metrics-extractor/config/config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bq-metrics-extractor/config/config.yaml b/bq-metrics-extractor/config/config.yaml index 4fa2acf45..74a32c67c 100644 --- a/bq-metrics-extractor/config/config.yaml +++ b/bq-metrics-extractor/config/config.yaml @@ -79,14 +79,14 @@ bqmetrics: help: Number of SIMs that has had an active data session today resultColumn: count sql: > - SELECT COUNT (DISTINCT user.msisdn) FROM `pantel-2decb.ocs_gateway.raw_activeusers`, UNNEST(users) as user + SELECT COUNT (DISTINCT user.msisdn) FROM `${DATASET_PROJECT}.ocs_gateway${DATASET_MODIFIER}.raw_activeusers`, UNNEST(users) as user WHERE timestamp >= TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) - type: gauge name: sims_who_was_active_yesterday help: Number of SIMs that has had an active data session yesterday resultColumn: count sql: > - SELECT COUNT (DISTINCT user.msisdn) FROM `pantel-2decb.ocs_gateway.raw_activeusers`, UNNEST(users) as user + SELECT COUNT (DISTINCT user.msisdn) FROM `${DATASET_PROJECT}.ocs_gateway${DATASET_MODIFIER}.raw_activeusers`, UNNEST(users) as user WHERE timestamp >= TIMESTAMP_SUB(TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY), INTERVAL 1 DAY) AND timestamp < TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) - type: gauge From 
3295c1d8f19154d9196aaebb65325386658a0a60 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Wed, 3 Oct 2018 22:55:59 +0200 Subject: [PATCH 23/93] Use result name --- bq-metrics-extractor/config/config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bq-metrics-extractor/config/config.yaml b/bq-metrics-extractor/config/config.yaml index 74a32c67c..a125f31d7 100644 --- a/bq-metrics-extractor/config/config.yaml +++ b/bq-metrics-extractor/config/config.yaml @@ -79,14 +79,14 @@ bqmetrics: help: Number of SIMs that has had an active data session today resultColumn: count sql: > - SELECT COUNT (DISTINCT user.msisdn) FROM `${DATASET_PROJECT}.ocs_gateway${DATASET_MODIFIER}.raw_activeusers`, UNNEST(users) as user + SELECT COUNT (DISTINCT user.msisdn) AS count FROM `${DATASET_PROJECT}.ocs_gateway${DATASET_MODIFIER}.raw_activeusers`, UNNEST(users) as user WHERE timestamp >= TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) - type: gauge name: sims_who_was_active_yesterday help: Number of SIMs that has had an active data session yesterday resultColumn: count sql: > - SELECT COUNT (DISTINCT user.msisdn) FROM `${DATASET_PROJECT}.ocs_gateway${DATASET_MODIFIER}.raw_activeusers`, UNNEST(users) as user + SELECT COUNT (DISTINCT user.msisdn) AS count FROM `${DATASET_PROJECT}.ocs_gateway${DATASET_MODIFIER}.raw_activeusers`, UNNEST(users) as user WHERE timestamp >= TIMESTAMP_SUB(TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY), INTERVAL 1 DAY) AND timestamp < TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) - type: gauge From 3603fd563a089d8a685b694d098ae98b3b1b08e4 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Wed, 3 Oct 2018 22:58:50 +0200 Subject: [PATCH 24/93] Remove unwanted logs --- exporter/script/export_data.sh | 3 --- 1 file changed, 3 deletions(-) diff --git a/exporter/script/export_data.sh b/exporter/script/export_data.sh index 2af4d633c..4844d864b 100644 --- a/exporter/script/export_data.sh +++ b/exporter/script/export_data.sh @@ -33,9 +33,6 @@ 
csvfile=$projectId-dataconsumption-export/$exportId.csv purchasesCsvfile=$projectId-dataconsumption-export/$exportId-purchases.csv sub2msisdnCsvfile=$projectId-dataconsumption-export/$exportId-sub2msisdn.csv -echo $rawPurchasesTable -echo $hourlyConsumptionTable - # Generate the pseudonym tables for this export echo "Starting export job for $exportId" pseudonymHost="pseudonym-server-service.default.svc.cluster.local" From 1dd7436cf04488f3caa1538e62a29ea04b680b0f Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Thu, 4 Oct 2018 07:10:26 +0200 Subject: [PATCH 25/93] Bump version. --- bq-metrics-extractor/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bq-metrics-extractor/build.gradle b/bq-metrics-extractor/build.gradle index 44fdb89aa..63f260c4e 100644 --- a/bq-metrics-extractor/build.gradle +++ b/bq-metrics-extractor/build.gradle @@ -5,7 +5,7 @@ plugins { id "idea" } -version = "2.0.0" +version = "2.1.0" dependencies { From 522529534231f1a739eda73580a3a96ffa45e661 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Thu, 4 Oct 2018 07:11:24 +0200 Subject: [PATCH 26/93] Use new deploy scripts in cronjob folder --- .../script/build-and-upload-docker-image.sh | 49 ------------------- 1 file changed, 49 deletions(-) delete mode 100755 bq-metrics-extractor/script/build-and-upload-docker-image.sh diff --git a/bq-metrics-extractor/script/build-and-upload-docker-image.sh b/bq-metrics-extractor/script/build-and-upload-docker-image.sh deleted file mode 100755 index f7844af69..000000000 --- a/bq-metrics-extractor/script/build-and-upload-docker-image.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh - -## -## Build a new jar file, then a new docker image, then -## upload the docker image to a google docker -## repository. 
-## - - -# Exit on failure -set -e - -# Check for dependencies -DEPENDENCIES="gradle docker gcloud" -for dep in $DEPENDENCIES ; do - if [[ -z "$(type $dep)" ]] ; then - echo "Could not find dependency $dep, bailing out" - exit 1 - fi -done - -# Set destination - -GCLOUD_PROJECT_NAME="pantel-2decb" -CONTAINER_NAME="bq-metrics-extractor" -GCLOUD_REPO_NAME="eu.gcr.io" - - - -# Log into the appropriate google account and prepare to build&upload -# XXX Couldn't figure out how to make this work well in a script, but -# that should be solved, therefore I'm keeping the dead code instead -# of doing the right thing according to the project coding standard -# and killing it off. -# gcloud auth login -# gcloud auth configure-docker - -# Build the java .jar application from sources -gradle build - -# Build the docker container -CONTAINER_ID=$(docker build . | grep "Successfully built" | awk '{print $3}') -echo "Built container $CONTAINER_ID" - -# Tag and push the docker container to the google repo -echo "Tagging and pushing container" -THE_TAG="${GCLOUD_REPO_NAME}/${GCLOUD_PROJECT_NAME}/${CONTAINER_NAME}" -docker tag ${CONTAINER_ID} ${THE_TAG} -docker push ${THE_TAG} From ab9c00f0e13b41267e6871eeec4eed46b51b239b Mon Sep 17 00:00:00 2001 From: "Kjell M. 
Myksvoll" Date: Thu, 4 Oct 2018 08:06:00 +0200 Subject: [PATCH 27/93] Minor whitespace fixes --- .../ostelco/prime/admin/importer/ImportProcessor.kt | 3 ++- sample-agent/sample-agent.sh | 11 +++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/ImportProcessor.kt b/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/ImportProcessor.kt index 3f1c653ee..6c3af7a09 100644 --- a/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/ImportProcessor.kt +++ b/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/ImportProcessor.kt @@ -1,3 +1,4 @@ + package org.ostelco.prime.admin.importer import arrow.core.Either @@ -23,4 +24,4 @@ class ImportAdapter : ImportProcessor { segments = importDeclaration.segments) .mapLeft { BadRequestError(it.message, ApiErrorCode.FAILED_TO_IMPORT_OFFER) } } -} \ No newline at end of file +} diff --git a/sample-agent/sample-agent.sh b/sample-agent/sample-agent.sh index d770fb2e7..6f01a2d3b 100755 --- a/sample-agent/sample-agent.sh +++ b/sample-agent/sample-agent.sh @@ -1,4 +1,4 @@ -#!/bin/bash -x +#!/bin/bash set -e @@ -34,7 +34,7 @@ trap "exit 1" TERM export TOP_PID=$$ function die() { - kill -s TERM $TOP_PID + kill -s TERM $TOP_PID } # @@ -44,9 +44,9 @@ function die() { DEPENDENCIES="gcloud kubectl gsutil" for dep in $DEPENDENCIES ; do - if [[ -z $(which $dep) ]] ; then - echo "ERROR: Could not find dependency $dep" - fi + if [[ -z $(which $dep) ]] ; then + echo "ERROR: Could not find dependency $dep" + fi done # @@ -293,7 +293,6 @@ cat > $IMPORTFILE_YML < Date: Thu, 4 Oct 2018 10:03:38 +0200 Subject: [PATCH 28/93] Script to apply a yaml from somewhere --- sample-agent/demo-yamls/apply-yaml.sh | 87 +++++++++++++++++++++++++++ sample-agent/demo-yamls/script1.yaml | 1 - 2 files changed, 87 insertions(+), 1 deletion(-) create mode 100755 sample-agent/demo-yamls/apply-yaml.sh delete mode 100644 sample-agent/demo-yamls/script1.yaml diff --git 
a/sample-agent/demo-yamls/apply-yaml.sh b/sample-agent/demo-yamls/apply-yaml.sh new file mode 100755 index 000000000..0536ff0f8 --- /dev/null +++ b/sample-agent/demo-yamls/apply-yaml.sh @@ -0,0 +1,87 @@ +#!/bin/bash -x + +set -e + +### +### SEND PRE_WRITTEN YAML SCRIPT TO THE IMPORTER. +### + +# +# Get command line parameter, which should be an existing +# file containing a yaml file. +# + +YAML_SCRIPTNAME=$1 +if [[ -z "$YAML_SCRIPTNAME" ]] ; then + echo "$0 Missing script" + echo "usage $0 yaml-script" + exit 1 +fi + +if [[ ! -f "$YAML_SCRIPTNAME" ]] ; then + echo "$0 $YAML_SCRIPTNAME does not exist or is not a file" + echo "usage $0 yaml-script" + exit 1 +fi + +### +### PRELIMINARIES +### + +# Be able to die from inside procedures + +# +# Check for dependencies being satisfied +# + +DEPENDENCIES="gcloud kubectl gsutil" + +for dep in $DEPENDENCIES ; do + if [[ -z $(which $dep) ]] ; then + echo "ERROR: Could not find dependency $dep" + fi +done + +# +# Figure out relevant parts of the environment and check their +# sanity. +# + +PROJECT_ID=$(gcloud config get-value project) + +if [[ -z "$PROJECT_ID" ]] ; then + echo "ERROR: Unknown google project ID" + exit 1 +fi + +PRIME_PODNAME=$(kubectl get pods | grep prime- | awk '{print $1}') +if [[ -z "$PRIME_PODNAME" ]] ; then + echo "ERROR: Unknown prime podname" + exit 1 +fi + + +## +## Checking the assumption that localhost forwarding is actually +## working. 
+## + +EXPECTED_FROM_GET_TO_IMPORT='{"code":405,"message":"HTTP 405 Method Not Allowed"}' +RESULT_FROM_GET_PROBE="$(curl http://127.0.0.1:8080/importer 2>/dev/null)" + +if [[ "$EXPECTED_FROM_GET_TO_IMPORT" != "$RESULT_FROM_GET_PROBE" ]] ; then + echo "$0 ERROR: Did not get expected result when probing importer, bailing out" + echo "$0: ERROR: Assuming that prime is running at $PRIME_PODNAME" + echo "$0: ERROR: and that you have done" + echo "$0: ERRIR: kubectl port-forward $PRIME_PODNAME 8080:8080" + echo "$0: ERROR: Please check if this is working" + exit 1 +fi + + +## +## Send it to the importer +## (assuming the kubectl port forwarding is enabled) + +IMPORTER_URL=http://127.0.0.1:8080/importer +curl -H "Content-type: text/vnd.yaml" --data-binary @$YAML_SCRIPTNAME $IMPORTER_URL diff --git a/sample-agent/demo-yamls/script1.yaml b/sample-agent/demo-yamls/script1.yaml deleted file mode 100644 index 27e2e5bb9..000000000 --- a/sample-agent/demo-yamls/script1.yaml +++ /dev/null @@ -1 +0,0 @@ -createOffer: From 441e0bb4ab671d189b199ab6bf0dea8d0351bf2b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Thu, 4 Oct 2018 10:27:07 +0200 Subject: [PATCH 29/93] Moving everything to top level --- sample-agent/{demo-yamls => }/apply-yaml.sh | 0 .../legacy/sample-offer-legacy.yaml | 77 ------------------- .../demo-yamls/legacy/sample-offer-only.yaml | 12 --- .../sample-offer-products-segments.yaml | 35 --------- sample-agent/{demo-yamls => }/notes.txt | 0 5 files changed, 124 deletions(-) rename sample-agent/{demo-yamls => }/apply-yaml.sh (100%) delete mode 100644 sample-agent/demo-yamls/legacy/sample-offer-legacy.yaml delete mode 100644 sample-agent/demo-yamls/legacy/sample-offer-only.yaml delete mode 100644 sample-agent/demo-yamls/legacy/sample-offer-products-segments.yaml rename sample-agent/{demo-yamls => }/notes.txt (100%) diff --git a/sample-agent/demo-yamls/apply-yaml.sh b/sample-agent/apply-yaml.sh similarity index 100% rename from 
sample-agent/demo-yamls/apply-yaml.sh rename to sample-agent/apply-yaml.sh diff --git a/sample-agent/demo-yamls/legacy/sample-offer-legacy.yaml b/sample-agent/demo-yamls/legacy/sample-offer-legacy.yaml deleted file mode 100644 index 2a6f38ebe..000000000 --- a/sample-agent/demo-yamls/legacy/sample-offer-legacy.yaml +++ /dev/null @@ -1,77 +0,0 @@ -# -# This is a sample YAML format to be used by -# agents that produce offers. The general idea -# is that an offer has a set of parameters, -# and also a set of selected subscribers that will -# get it. -# -# YAML was chosen since it's more human readable than -# e.g. json or protobuffers, while still being -# easy to produce by an agent, and relatively compact, -# in particular when gzipped. -# - -producingAgent: - name: Simple agent - version: 1.0 - -# # All of the parameters below are just copied from the firebasr -# # realtime database we used in the demo, converted to -# # camel case. All the fields should be documented -# # in this document, and we should think through if this is -# # the best set of parameters we went. - -offer: - # XXX This offer does not have an ID, but if we were just - # updating the list of members of the segment, it would - # make sense to have an OfferID, or something that refers - # to a previously created offer. That id should be created - # by the importer, and used by the agent when updating - # membership. If any other parameters are going to be - # changed, it is necessary to produce a new offer. - # It may make sense to put the ID in the url when - # when we update (PUT method) changes, but then the - # ID in the yaml will be redundant. Figure out how to - # do this one way or another and just do it. - visibility: - from: "2018-02-22T12:41:49.871Z" - to: "2018-02-22T12:41:49.871Z" - presentation: - badgeLabel: "mbop" - description: "Best offer you will get today" - shortDescription: "Best offer!" 
- label: "3 GB" - name: "3 GB" - priceLabel: "49 NOK" - hidden: false - imageUrl: https://www.ft-associates.com/wp-content/uploads/2015/08/Best-Offer.jpg - financial: - repurchability: 1 - currencyLabel: "NOK" - price: 4900 - taxRate: 10.0 - product: - sku: 2 - # A possibly very long list of product parameters that are all - # dependent on the SKU's requirement. Details ignored here, - # that may pop up later. Deal with them then. - noOfBytes: 3000000000 - -# # We put the segment last, since it may have a long list of -# # members in it. We want that list to be last, since it contains -# # little information that humans are interested in, and we want -# # humans to start reading the file at the top. - -segment: - type: agent-specific-segment - description: "This is how this segment should be described" - members: - # The decryption key is what the de-anonymizer will use to - # make proper identifiers out of the members listed below. - # The special purpose key "none" indicatest that the member list - # is in clear text. 
- decryptionKey: none - members: - - 4790300157 - - 4790300144 - - 4333333333 diff --git a/sample-agent/demo-yamls/legacy/sample-offer-only.yaml b/sample-agent/demo-yamls/legacy/sample-offer-only.yaml deleted file mode 100644 index 5e11702f8..000000000 --- a/sample-agent/demo-yamls/legacy/sample-offer-only.yaml +++ /dev/null @@ -1,12 +0,0 @@ -producingAgent: - name: Simple agent - version: 1.0 - -offer: - id: test-offer - # use existing product - products: - - 1GB_249NOK - # use existing segment - segments: - - test-segment diff --git a/sample-agent/demo-yamls/legacy/sample-offer-products-segments.yaml b/sample-agent/demo-yamls/legacy/sample-offer-products-segments.yaml deleted file mode 100644 index b1a6e2809..000000000 --- a/sample-agent/demo-yamls/legacy/sample-offer-products-segments.yaml +++ /dev/null @@ -1,35 +0,0 @@ -producingAgent: - name: Simple agent - version: 1.0 - -offer: - id: test-offer - - # list of existing products - # listing products to be created in this yaml is OPTIONAL - -# products: -# - 1GB_249NOK - - # list of existing segments - # listing segments to be created in this yaml is OPTIONAL - -# segments: -# - test-segment - -# These products will be created and linked to offer - 'test-offer' -products: - - sku: 1GB_249NOK - price: - amount: 249 - currency: NOK - properties: - noOfBytes: 1_000_000_000 - presentation: - isDefault: true - offerLabel: Default Offer - priceLabel: 249 NOK - -# These segments will be created and linked to offer - 'test-offer' -segments: - - id: test-segment diff --git a/sample-agent/demo-yamls/notes.txt b/sample-agent/notes.txt similarity index 100% rename from sample-agent/demo-yamls/notes.txt rename to sample-agent/notes.txt From 20a82489f68914d00f3877bc92466cdba2ec6bca Mon Sep 17 00:00:00 2001 From: "Kjell M. 
Myksvoll" Date: Thu, 4 Oct 2018 10:31:26 +0200 Subject: [PATCH 30/93] Fixes bug causing empty list with subscribers to be added to new segment --- sample-agent/sample-agent.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sample-agent/sample-agent.sh b/sample-agent/sample-agent.sh index 6f01a2d3b..4c2afa9d2 100755 --- a/sample-agent/sample-agent.sh +++ b/sample-agent/sample-agent.sh @@ -261,10 +261,11 @@ awk -F, '!/^subscriberId/{print $1'} $(importedCsvFilename "$EXPORT_ID" "$TARGET RESULTSEG_PSEUDO_BASENAME="resultsegment-pseudoanonymized" RESULTSEG_CLEARTEXT_BASENAME="resultsegment-cleartext" +RESULTSEG_CLEARTEXT_SUBSCRIBERS="resultsegment-cleartext-subscribers" RESULT_SEGMENT_PSEUDO_GS="$(gsExportCsvFilename "$EXPORT_ID" "$RESULTSEG_PSEUDO_BASENAME")" RESULT_SEGMENT_CLEAR_GS="$(gsExportCsvFilename "$EXPORT_ID" "$RESULTSEG_CLEARTEXT_BASENAME")" RESULT_SEGMENT_CLEAR="$(importedCsvFilename "$EXPORT_ID" "$TARGET_DIR" "$RESULTSEG_CLEARTEXT_BASENAME")" -RESULT_SEGMENT_SINGLE_COLUMN="$(importedCsvFilename "$EXPORT_ID" "$TARGET_DIR" "$RESULTSEG_CLEARTEXT_BASENAME")" +RESULT_SEGMENT_SINGLE_COLUMN="$(importedCsvFilename "$EXPORT_ID" "$TARGET_DIR" "$RESULTSEG_CLEARTEXT_SUBSCRIBERS")" # Copy the segment pseudo file to gs From 5c649a7602b33473b645be55e1db6ec9a0cfc333 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Thu, 4 Oct 2018 10:39:57 +0200 Subject: [PATCH 31/93] Move to commandline options - Added 2 new options - Accepts commandline start options. 
- Different files for dev and prod --- dataflow-pipelines/docker-compose-dev.yaml | 13 +++++ dataflow-pipelines/docker-compose.yaml | 7 ++- dataflow-pipelines/script/start.sh | 10 +++- .../dataflow/pipelines/DeployPipeline.kt | 56 +++++++++---------- .../DataConsumptionPipelineDefinition.kt | 16 ++++-- .../definitions/DummyPipelineDefinition.kt | 3 +- .../definitions/PipelineDefinition.kt | 3 +- .../ostelco/dataflow/pipelines/io/BigQuery.kt | 8 +-- .../ostelco/dataflow/pipelines/io/PubSub.kt | 4 +- 9 files changed, 75 insertions(+), 45 deletions(-) create mode 100644 dataflow-pipelines/docker-compose-dev.yaml diff --git a/dataflow-pipelines/docker-compose-dev.yaml b/dataflow-pipelines/docker-compose-dev.yaml new file mode 100644 index 000000000..afef9c1c6 --- /dev/null +++ b/dataflow-pipelines/docker-compose-dev.yaml @@ -0,0 +1,13 @@ +version: "3.7" + +services: + dataflow-pipelines: + container_name: dataflow-pipelines + build: . + environment: + - GOOGLE_APPLICATION_CREDENTIALS=/config/pantel-prod.json + - PROJECT=pantel-2decb + - JOB_NAME=data-traffic-dev + - PUBSUB_TOPIC=data-traffic-dev + - DATASET=data_consumption_dev + - UPDATING=false \ No newline at end of file diff --git a/dataflow-pipelines/docker-compose.yaml b/dataflow-pipelines/docker-compose.yaml index d5a8058b9..55c2c5823 100644 --- a/dataflow-pipelines/docker-compose.yaml +++ b/dataflow-pipelines/docker-compose.yaml @@ -5,4 +5,9 @@ services: container_name: dataflow-pipelines build: . 
environment: - - GOOGLE_APPLICATION_CREDENTIALS=/config/pantel-prod.json \ No newline at end of file + - GOOGLE_APPLICATION_CREDENTIALS=/config/pantel-prod.json + - PROJECT=pantel-2decb + - JOB_NAME=data-traffic + - PUBSUB_TOPIC=data-traffic + - DATASET=data_consumption + - UPDATING=false \ No newline at end of file diff --git a/dataflow-pipelines/script/start.sh b/dataflow-pipelines/script/start.sh index 9ee58596d..4749f0556 100755 --- a/dataflow-pipelines/script/start.sh +++ b/dataflow-pipelines/script/start.sh @@ -3,4 +3,12 @@ # Start app exec java \ -Dfile.encoding=UTF-8 \ - -jar /dataflow-pipelines.jar + -jar /dataflow-pipelines.jar \ + --project=$PROJECT \ + --runner=DataflowRunner \ + --stagingLocation=gs://data-traffic/staging/ \ + --region=europe-west1 \ + --jobName=$JOB_NAME \ + --pubsubTopic=$PUBSUB_TOPIC \ + --dataset=$DATASET \ + --update=$UPDATING diff --git a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/DeployPipeline.kt b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/DeployPipeline.kt index 39491d5fa..8d2a9fdd7 100644 --- a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/DeployPipeline.kt +++ b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/DeployPipeline.kt @@ -1,61 +1,61 @@ package org.ostelco.dataflow.pipelines import ch.qos.logback.classic.util.ContextInitializer -import org.apache.beam.runners.dataflow.DataflowRunner import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions import org.apache.beam.sdk.Pipeline -import org.apache.beam.sdk.options.PipelineOptions +import org.apache.beam.sdk.options.Default +import org.apache.beam.sdk.options.Description import org.apache.beam.sdk.options.PipelineOptionsFactory import org.ostelco.dataflow.pipelines.definitions.DataConsumptionPipelineDefinition import org.ostelco.dataflow.pipelines.definitions.DummyPipelineDefinition import org.ostelco.dataflow.pipelines.definitions.PipelineDefinition + enum class 
PipelineDefinitionRegistry(val pipelineDefinition: PipelineDefinition) { DATA_CONSUMPTION(DataConsumptionPipelineDefinition), DUMMY(DummyPipelineDefinition), } +interface ConsumptionPipelineOptions : DataflowPipelineOptions { + @get:Description("Dataset name.") + @get:Default.String("data_consumption") + var dataset: String + @get:Description("PubSub toipc name.") + @get:Default.String("data-traffic") + var pubsubTopic: String +} + fun main(args: Array) { System.setProperty(ContextInitializer.CONFIG_FILE_PROPERTY, "config/logback.xml") - DeployPipeline().deploy(pipelineName = "DATA_CONSUMPTION") + DeployPipeline().deploy( + pipelineName = "DATA_CONSUMPTION", + args = args) } class DeployPipeline { - private fun parseOptions(): PipelineOptions { - - // may be we need to pass options via command-line args - /* - val options = PipelineOptionsFactory - .fromArgs( - "--project=pantel-2decb", - "--runner=DataflowRunner", - "--stagingLocation=gs://data-traffic/staging/", - "--jobName=data-traffic") - .withValidation() - .create() - */ - - val options = PipelineOptionsFactory.`as`(DataflowPipelineOptions::class.java) - options.jobName = "data-traffic" - options.project = "pantel-2decb" - options.stagingLocation = "gs://data-traffic/staging/" - options.region = "europe-west1" - options.runner = DataflowRunner::class.java - options.isUpdate = true + private fun parseOptions(args: Array): ConsumptionPipelineOptions { + + PipelineOptionsFactory.register(ConsumptionPipelineOptions::class.java) + + val options = PipelineOptionsFactory + .fromArgs(*args) + .withValidation() + .`as`(ConsumptionPipelineOptions::class.java) + + println("${options.dataset}, ${options.pubsubTopic}, ${options.jobName}, ${options.isUpdate} ") return options } - fun deploy(pipelineName: String) { - - val options = parseOptions() + fun deploy(pipelineName: String, args: Array) { + val options = parseOptions(args) PipelineDefinitionRegistry .valueOf(pipelineName) .apply { Pipeline.create(options) - .apply 
{ pipelineDefinition.define(this) } + .apply { pipelineDefinition.define(this, options) } .run() .waitUntilFinish() } diff --git a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/DataConsumptionPipelineDefinition.kt b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/DataConsumptionPipelineDefinition.kt index 2963bf8af..1029a35d1 100644 --- a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/DataConsumptionPipelineDefinition.kt +++ b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/DataConsumptionPipelineDefinition.kt @@ -21,6 +21,7 @@ import org.joda.time.Duration import org.joda.time.Instant import org.ostelco.analytics.api.AggregatedDataTrafficInfo import org.ostelco.analytics.api.DataTrafficInfo +import org.ostelco.dataflow.pipelines.ConsumptionPipelineOptions import org.ostelco.dataflow.pipelines.dsl.ParDoFn import org.ostelco.dataflow.pipelines.io.BigQueryIOUtils.saveToBigQuery import org.ostelco.dataflow.pipelines.io.Table.HOURLY_CONSUMPTION @@ -31,7 +32,7 @@ import org.ostelco.dataflow.pipelines.io.readFromPubSub object DataConsumptionPipelineDefinition : PipelineDefinition { - override fun define(pipeline: Pipeline) { + override fun define(pipeline: Pipeline, options: ConsumptionPipelineOptions) { // Filter events with empty buckets val filterEmptyBucketEvents = Filter.by(SerializableFunction { dataTrafficInfo: DataTrafficInfo -> @@ -42,24 +43,29 @@ object DataConsumptionPipelineDefinition : PipelineDefinition { // Construct pipeline chain // - // First two common steps of pipeline, before it gets forked. 
val dataTrafficInfoEvents = pipeline - .apply("readFromPubSub", readFromPubSub("data-traffic")) + .apply("readFromPubSub", readFromPubSub(project = options.project, topic = options.pubsubTopic)) .apply("filterEmptyBucketEvents", filterEmptyBucketEvents) // PubSubEvents -> raw_consumption big-query dataTrafficInfoEvents .apply("convertToRawTableRows", convertToRawTableRows) .setCoder(TableRowJsonCoder.of()) - .apply("saveRawEventsToBigQuery", saveToBigQuery(RAW_CONSUMPTION)) + .apply("saveRawEventsToBigQuery", saveToBigQuery( + project = options.project, + dataset = options.dataset, + table = RAW_CONSUMPTION)) // PubSubEvents -> aggregate by hour -> hourly_consumption big-query dataTrafficInfoEvents .apply("TotalDataConsumptionGroupByMsisdn", consumptionPerMsisdn) .apply("convertToHourlyTableRows", convertToHourlyTableRows) .setCoder(TableRowJsonCoder.of()) - .apply("saveToBigQueryGroupedByHour", saveToBigQuery(HOURLY_CONSUMPTION)) + .apply("saveToBigQueryGroupedByHour", saveToBigQuery( + project = options.project, + dataset = options.dataset, + table = HOURLY_CONSUMPTION)) } } diff --git a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/DummyPipelineDefinition.kt b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/DummyPipelineDefinition.kt index bba3e0627..94855042a 100644 --- a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/DummyPipelineDefinition.kt +++ b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/DummyPipelineDefinition.kt @@ -1,9 +1,10 @@ package org.ostelco.dataflow.pipelines.definitions import org.apache.beam.sdk.Pipeline +import org.ostelco.dataflow.pipelines.ConsumptionPipelineOptions object DummyPipelineDefinition : PipelineDefinition { - override fun define(pipeline: Pipeline) { + override fun define(pipeline: Pipeline, options: ConsumptionPipelineOptions) { TODO("not implemented") //To change body of created functions use File | 
Settings | File Templates. } } \ No newline at end of file diff --git a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/PipelineDefinition.kt b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/PipelineDefinition.kt index 2a69de643..a2959128a 100644 --- a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/PipelineDefinition.kt +++ b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/definitions/PipelineDefinition.kt @@ -1,7 +1,8 @@ package org.ostelco.dataflow.pipelines.definitions import org.apache.beam.sdk.Pipeline +import org.ostelco.dataflow.pipelines.ConsumptionPipelineOptions interface PipelineDefinition { - fun define(pipeline: Pipeline) + fun define(pipeline: Pipeline, options: ConsumptionPipelineOptions) } \ No newline at end of file diff --git a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/io/BigQuery.kt b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/io/BigQuery.kt index 79e629e27..f8f7e62d5 100644 --- a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/io/BigQuery.kt +++ b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/io/BigQuery.kt @@ -8,6 +8,7 @@ import com.google.protobuf.util.Timestamps import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO import org.ostelco.analytics.api.AggregatedDataTrafficInfo import org.ostelco.analytics.api.DataTrafficInfo +import org.ostelco.dataflow.pipelines.ConsumptionPipelineOptions import org.ostelco.dataflow.pipelines.dsl.ParDoFn import org.ostelco.dataflow.pipelines.io.Table.DAILY_CONSUMPTION import org.ostelco.dataflow.pipelines.io.Table.HOURLY_CONSUMPTION @@ -19,11 +20,6 @@ import java.util.* // This code is an attempt to keep all database schema in one place. -// This may be moved to config. 
-private const val project = "pantel-2decb" -private const val dataset = "data_consumption" - - /** * Enum containing identifiers for three tables * stored in bigtable. @@ -108,7 +104,7 @@ object BigQueryIOUtils { * Create a [BigQueryIO.Write] query for writing all the * rows in a [Table] - denoted table. */ - fun saveToBigQuery(table: Table): BigQueryIO.Write { + fun saveToBigQuery(project: String, dataset: String, table: Table): BigQueryIO.Write { return BigQueryIO.writeTableRows() .to("$project:$dataset.${table.name.toLowerCase()}") .withSchema(TableSchemas.getTableSchema(table)) diff --git a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/io/PubSub.kt b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/io/PubSub.kt index 8353134e3..a47a7b7c1 100644 --- a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/io/PubSub.kt +++ b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/io/PubSub.kt @@ -4,6 +4,6 @@ import org.apache.beam.sdk.io.gcp.pubsub.PubsubIO import org.ostelco.analytics.api.DataTrafficInfo // Read from PubSub -fun readFromPubSub(topic: String) = PubsubIO +fun readFromPubSub(project:String, topic: String) = PubsubIO .readProtos(DataTrafficInfo::class.java) - .fromSubscription("projects/pantel-2decb/subscriptions/$topic") + .fromSubscription("projects/$project/subscriptions/$topic") From 080019761eff2b4c8404d009081cfd2bed020f1a Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Thu, 4 Oct 2018 11:20:57 +0200 Subject: [PATCH 32/93] Reverting back from jdk11 to jdk8 for Apache Beam --- analytics-grpc-api/build.gradle | 3 +++ dataflow-pipelines/Dockerfile | 2 +- dataflow-pipelines/build.gradle | 3 +++ 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/analytics-grpc-api/build.gradle b/analytics-grpc-api/build.gradle index fb2ba62f8..1fb4487e6 100644 --- a/analytics-grpc-api/build.gradle +++ b/analytics-grpc-api/build.gradle @@ -26,6 +26,9 @@ protobuf { } } +sourceCompatibility = "1.8" 
+targetCompatibility = "1.8" + idea { module { sourceDirs += files("${protobuf.generatedFilesBaseDir}/main/java") diff --git a/dataflow-pipelines/Dockerfile b/dataflow-pipelines/Dockerfile index d0cd99b27..4827fd238 100644 --- a/dataflow-pipelines/Dockerfile +++ b/dataflow-pipelines/Dockerfile @@ -1,4 +1,4 @@ -FROM openjdk:11 +FROM azul/zulu-openjdk:8u181-8.31.0.1 MAINTAINER CSI "csi@telenordigital.com" diff --git a/dataflow-pipelines/build.gradle b/dataflow-pipelines/build.gradle index 54b887a52..b315d2b2f 100644 --- a/dataflow-pipelines/build.gradle +++ b/dataflow-pipelines/build.gradle @@ -32,6 +32,9 @@ shadowJar { version = null } +sourceCompatibility = "1.8" +targetCompatibility = "1.8" + test { // native support to Junit5 in Gradle 4.6+ useJUnitPlatform { From 64d579930bbeead27db601622236a83ea6ef4d3e Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Thu, 4 Oct 2018 11:44:40 +0200 Subject: [PATCH 33/93] Allow to update the pipeline --- dataflow-pipelines/docker-compose-dev.yaml | 2 +- dataflow-pipelines/docker-compose.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dataflow-pipelines/docker-compose-dev.yaml b/dataflow-pipelines/docker-compose-dev.yaml index afef9c1c6..90a458535 100644 --- a/dataflow-pipelines/docker-compose-dev.yaml +++ b/dataflow-pipelines/docker-compose-dev.yaml @@ -10,4 +10,4 @@ services: - JOB_NAME=data-traffic-dev - PUBSUB_TOPIC=data-traffic-dev - DATASET=data_consumption_dev - - UPDATING=false \ No newline at end of file + - UPDATING=true \ No newline at end of file diff --git a/dataflow-pipelines/docker-compose.yaml b/dataflow-pipelines/docker-compose.yaml index 55c2c5823..6cadccabf 100644 --- a/dataflow-pipelines/docker-compose.yaml +++ b/dataflow-pipelines/docker-compose.yaml @@ -10,4 +10,4 @@ services: - JOB_NAME=data-traffic - PUBSUB_TOPIC=data-traffic - DATASET=data_consumption - - UPDATING=false \ No newline at end of file + - UPDATING=true \ No newline at end of file From 
f45db53de140bbfe5babc95c8ec838e1d60ef070 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Thu, 4 Oct 2018 11:46:22 +0200 Subject: [PATCH 34/93] Remove logs --- .../org/ostelco/dataflow/pipelines/DeployPipeline.kt | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/DeployPipeline.kt b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/DeployPipeline.kt index 8d2a9fdd7..20db8afc4 100644 --- a/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/DeployPipeline.kt +++ b/dataflow-pipelines/src/main/kotlin/org/ostelco/dataflow/pipelines/DeployPipeline.kt @@ -35,17 +35,11 @@ fun main(args: Array) { class DeployPipeline { private fun parseOptions(args: Array): ConsumptionPipelineOptions { - PipelineOptionsFactory.register(ConsumptionPipelineOptions::class.java) - - val options = PipelineOptionsFactory + return PipelineOptionsFactory .fromArgs(*args) .withValidation() .`as`(ConsumptionPipelineOptions::class.java) - - println("${options.dataset}, ${options.pubsubTopic}, ${options.jobName}, ${options.isUpdate} ") - - return options } fun deploy(pipelineName: String, args: Array) { From 94d2054303df54554214aeade715d083a36ac09c Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Thu, 4 Oct 2018 13:05:56 +0200 Subject: [PATCH 35/93] Log store error for bad gateway response --- admin-api/build.gradle | 1 - .../prime/admin/api/ImporterResource.kt | 3 +- auth-server/build.gradle | 4 +- client-api/build.gradle | 1 - .../prime/client/api/ClientApiModule.kt | 4 +- .../client/api/auth/OAuthAuthenticator.kt | 4 +- .../prime/client/api/core/EndpointUserInfo.kt | 4 +- .../client/api/resources/AnalyticsResource.kt | 1 + .../api/resources/ApplicationTokenResource.kt | 1 + .../client/api/resources/BundlesResource.kt | 1 + .../client/api/resources/ConsentsResource.kt | 1 + .../client/api/resources/PaymentResource.kt | 1 + .../client/api/resources/ProductsResource.kt | 1 + 
.../client/api/resources/ProfileResource.kt | 1 + .../client/api/resources/PurchaseResource.kt | 1 + .../client/api/resources/ReferralResource.kt | 1 + .../api/resources/SubscriptionResource.kt | 1 + .../client/api/store/SubscriberDAOImpl.kt | 4 +- .../api/resources/AnalyticsResourceTest.kt | 4 +- .../resources/ApplicationTokenResourceTest.kt | 7 ++-- .../api/resources/ProductsResourceTest.kt | 7 ++-- .../api/resources/ProfileResourceTest.kt | 5 +-- .../api/resources/PurchasesResourceTest.kt | 7 ++-- .../api/resources/SubscriptionResourceTest.kt | 8 ++-- .../resources/SubscriptionsResourceTest.kt | 7 ++-- firebase-extensions/build.gradle | 1 - .../FirebaseExtensions.kt | 5 +-- neo4j-store/build.gradle | 1 - .../ostelco/prime/storage/graph/Neo4jStore.kt | 3 +- .../org/ostelco/prime/storage/graph/Schema.kt | 9 +++-- .../ostelco/prime/storage/graph/SchemaTest.kt | 4 +- prime-modules/build.gradle | 1 + .../org/ostelco/prime/apierror/ApiError.kt | 39 ++++++++++++------- .../ostelco/prime/jsonmapper/JsonMapper.kt | 6 +-- prime/build.gradle | 1 - pseudonym-server/build.gradle | 2 +- 36 files changed, 84 insertions(+), 68 deletions(-) rename client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ResourceHelpers.kt => prime-modules/src/main/kotlin/org/ostelco/prime/jsonmapper/JsonMapper.kt (74%) diff --git a/admin-api/build.gradle b/admin-api/build.gradle index 70f917bf0..8eb890594 100644 --- a/admin-api/build.gradle +++ b/admin-api/build.gradle @@ -5,7 +5,6 @@ plugins { dependencies { implementation project(":prime-modules") - implementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" } diff --git a/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImporterResource.kt b/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImporterResource.kt index 3f5c81c0b..4f89b1597 100644 --- a/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImporterResource.kt +++ 
b/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImporterResource.kt @@ -3,6 +3,7 @@ package org.ostelco.prime.admin.api import com.fasterxml.jackson.core.JsonProcessingException import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.dataformat.yaml.YAMLFactory +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import com.fasterxml.jackson.module.kotlin.registerKotlinModule import org.ostelco.prime.admin.importer.ImportDeclaration import org.ostelco.prime.admin.importer.ImportProcessor @@ -74,7 +75,7 @@ class YamlMessageBodyReader : MessageBodyReader { * Common 'helper' functions for resources. * */ -val objectMapper = ObjectMapper() +val objectMapper = jacksonObjectMapper() fun R.asJson(`object`: Any): String { try { diff --git a/auth-server/build.gradle b/auth-server/build.gradle index 82c788ffc..39c560193 100644 --- a/auth-server/build.gradle +++ b/auth-server/build.gradle @@ -8,8 +8,10 @@ plugins { dependencies { implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion" implementation "io.dropwizard:dropwizard-core:$dropwizardVersion" - implementation project(":firebase-extensions") + implementation project(":firebase-extensions") + implementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" + implementation 'javax.xml.bind:jaxb-api:2.3.0' implementation 'javax.activation:activation:1.1.1' diff --git a/client-api/build.gradle b/client-api/build.gradle index b0519c812..782439b07 100644 --- a/client-api/build.gradle +++ b/client-api/build.gradle @@ -19,7 +19,6 @@ dependencies { testImplementation "io.dropwizard:dropwizard-client:$dropwizardVersion" testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" - testImplementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" testImplementation "org.mockito:mockito-core:$mockitoVersion" testImplementation "org.assertj:assertj-core:$assertJVersion" diff --git 
a/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt index 7f8876c9d..52fdad641 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt @@ -3,7 +3,7 @@ package org.ostelco.prime.client.api import com.fasterxml.jackson.annotation.JsonProperty import com.fasterxml.jackson.annotation.JsonTypeName import com.fasterxml.jackson.databind.DeserializationFeature -import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import io.dropwizard.auth.AuthDynamicFeature import io.dropwizard.auth.AuthValueFactoryProvider import io.dropwizard.auth.CachingAuthenticator @@ -65,7 +65,7 @@ class ClientApiModule : PrimeModule { val client: Client = JerseyClientBuilder(env) .using(config.jerseyClientConfiguration) - .using(ObjectMapper() + .using(jacksonObjectMapper() .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)) .build(env.name) diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/auth/OAuthAuthenticator.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/auth/OAuthAuthenticator.kt index 07ac2d8ca..8b4d7f50b 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/auth/OAuthAuthenticator.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/auth/OAuthAuthenticator.kt @@ -2,7 +2,7 @@ package org.ostelco.prime.client.api.auth import com.fasterxml.jackson.core.JsonParseException import com.fasterxml.jackson.databind.JsonNode -import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import io.dropwizard.auth.AuthenticationException import io.dropwizard.auth.Authenticator import org.ostelco.prime.client.api.core.UserInfo @@ -29,7 +29,7 @@ class OAuthAuthenticator(private val client: Client) : 
Authenticator { diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/core/EndpointUserInfo.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/core/EndpointUserInfo.kt index 516d38178..ecd6035a5 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/core/EndpointUserInfo.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/core/EndpointUserInfo.kt @@ -1,7 +1,7 @@ package org.ostelco.prime.client.api.core import com.fasterxml.jackson.databind.JsonNode -import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import org.ostelco.prime.getLogger import java.util.* @@ -25,7 +25,7 @@ class EndpointUserInfo(enc: String) { private val logger by getLogger() - private val mapper = ObjectMapper() + private val mapper = jacksonObjectMapper() private val obj: JsonNode = mapper.readTree(decode(enc)) private fun decode(enc: String): String = String(Base64.getDecoder().decode(enc)) diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/AnalyticsResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/AnalyticsResource.kt index 4de3e8639..55581c397 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/AnalyticsResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/AnalyticsResource.kt @@ -3,6 +3,7 @@ package org.ostelco.prime.client.api.resources import io.dropwizard.auth.Auth import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.store.SubscriberDAO +import org.ostelco.prime.jsonmapper.asJson import javax.validation.constraints.NotNull import javax.ws.rs.Consumes import javax.ws.rs.POST diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ApplicationTokenResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ApplicationTokenResource.kt index b743ed398..26a3bbb35 100644 --- 
a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ApplicationTokenResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ApplicationTokenResource.kt @@ -3,6 +3,7 @@ package org.ostelco.prime.client.api.resources import io.dropwizard.auth.Auth import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.store.SubscriberDAO +import org.ostelco.prime.jsonmapper.asJson import org.ostelco.prime.model.ApplicationToken import javax.validation.constraints.NotNull import javax.ws.rs.Consumes diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/BundlesResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/BundlesResource.kt index 9096dfb80..e204a1b4d 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/BundlesResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/BundlesResource.kt @@ -3,6 +3,7 @@ package org.ostelco.prime.client.api.resources import io.dropwizard.auth.Auth import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.store.SubscriberDAO +import org.ostelco.prime.jsonmapper.asJson import javax.ws.rs.GET import javax.ws.rs.Path import javax.ws.rs.Produces diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ConsentsResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ConsentsResource.kt index dab01548c..ee38b2f72 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ConsentsResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ConsentsResource.kt @@ -3,6 +3,7 @@ package org.ostelco.prime.client.api.resources import io.dropwizard.auth.Auth import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.store.SubscriberDAO +import org.ostelco.prime.jsonmapper.asJson import 
javax.validation.constraints.NotNull import javax.ws.rs.DefaultValue diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PaymentResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PaymentResource.kt index ff050e8d7..ce37f15e8 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PaymentResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PaymentResource.kt @@ -4,6 +4,7 @@ import io.dropwizard.auth.Auth import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.store.SubscriberDAO import org.ostelco.prime.getLogger +import org.ostelco.prime.jsonmapper.asJson import javax.validation.constraints.NotNull import javax.ws.rs.* import javax.ws.rs.core.Response diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ProductsResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ProductsResource.kt index b89a8f446..71e224489 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ProductsResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ProductsResource.kt @@ -3,6 +3,7 @@ package org.ostelco.prime.client.api.resources import io.dropwizard.auth.Auth import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.store.SubscriberDAO +import org.ostelco.prime.jsonmapper.asJson import javax.validation.constraints.NotNull import javax.ws.rs.GET import javax.ws.rs.POST diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ProfileResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ProfileResource.kt index 0c7cb1391..d06479f3d 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ProfileResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ProfileResource.kt @@ -3,6 +3,7 @@ package 
org.ostelco.prime.client.api.resources import io.dropwizard.auth.Auth import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.store.SubscriberDAO +import org.ostelco.prime.jsonmapper.asJson import org.ostelco.prime.model.Subscriber import javax.validation.constraints.NotNull import javax.ws.rs.Consumes diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PurchaseResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PurchaseResource.kt index b19f3865f..e222fa2b9 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PurchaseResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/PurchaseResource.kt @@ -3,6 +3,7 @@ package org.ostelco.prime.client.api.resources import io.dropwizard.auth.Auth import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.store.SubscriberDAO +import org.ostelco.prime.jsonmapper.asJson import javax.ws.rs.GET import javax.ws.rs.Path import javax.ws.rs.Produces diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ReferralResource.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ReferralResource.kt index 698f48bb3..ebf210e48 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ReferralResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ReferralResource.kt @@ -3,6 +3,7 @@ package org.ostelco.prime.client.api.resources import io.dropwizard.auth.Auth import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.store.SubscriberDAO +import org.ostelco.prime.jsonmapper.asJson import javax.ws.rs.GET import javax.ws.rs.Path import javax.ws.rs.Produces diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResource.kt 
b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResource.kt index bd44709bb..28fc032b0 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResource.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResource.kt @@ -3,6 +3,7 @@ package org.ostelco.prime.client.api.resources import io.dropwizard.auth.Auth import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.store.SubscriberDAO +import org.ostelco.prime.jsonmapper.asJson import javax.ws.rs.GET import javax.ws.rs.Path import javax.ws.rs.Produces diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt index 900740f8f..d6ce88aaa 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/store/SubscriberDAOImpl.kt @@ -5,12 +5,12 @@ import arrow.core.flatMap import org.ostelco.prime.analytics.AnalyticsService import org.ostelco.prime.apierror.ApiError import org.ostelco.prime.apierror.ApiErrorCode +import org.ostelco.prime.apierror.ApiErrorMapper.mapPaymentErrorToApiError +import org.ostelco.prime.apierror.ApiErrorMapper.mapStorageErrorToApiError import org.ostelco.prime.apierror.BadGatewayError import org.ostelco.prime.apierror.BadRequestError import org.ostelco.prime.apierror.InsufficientStorageError import org.ostelco.prime.apierror.NotFoundError -import org.ostelco.prime.apierror.mapPaymentErrorToApiError -import org.ostelco.prime.apierror.mapStorageErrorToApiError import org.ostelco.prime.client.api.metrics.updateMetricsOnNewSubscriber import org.ostelco.prime.client.api.model.Consent import org.ostelco.prime.client.api.model.Person diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/AnalyticsResourceTest.kt 
b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/AnalyticsResourceTest.kt index f44d50879..f14cb3aed 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/AnalyticsResourceTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/AnalyticsResourceTest.kt @@ -2,7 +2,7 @@ package org.ostelco.prime.client.api.resources import arrow.core.Either import com.fasterxml.jackson.core.JsonParseException -import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import com.nhaarman.mockito_kotlin.argumentCaptor import io.dropwizard.auth.AuthDynamicFeature import io.dropwizard.auth.AuthValueFactoryProvider @@ -32,7 +32,7 @@ import javax.ws.rs.core.Response */ class AnalyticsResourceTest { - private val MAPPER = ObjectMapper() + private val MAPPER = jacksonObjectMapper() private val email = "mw@internet.org" diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ApplicationTokenResourceTest.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ApplicationTokenResourceTest.kt index abe3ca9e6..e129dadb8 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ApplicationTokenResourceTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ApplicationTokenResourceTest.kt @@ -1,12 +1,11 @@ package org.ostelco.prime.client.api.resources import arrow.core.Either -import com.fasterxml.jackson.module.kotlin.KotlinModule +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import com.nhaarman.mockito_kotlin.argumentCaptor import io.dropwizard.auth.AuthDynamicFeature import io.dropwizard.auth.AuthValueFactoryProvider import io.dropwizard.auth.oauth.OAuthCredentialAuthFilter -import io.dropwizard.jackson.Jackson import io.dropwizard.testing.junit.ResourceTestRule import org.assertj.core.api.Assertions.assertThat import 
org.glassfish.jersey.test.grizzly.GrizzlyWebTestContainerFactory @@ -16,11 +15,11 @@ import org.junit.Test import org.mockito.ArgumentMatchers import org.mockito.Mockito.`when` import org.mockito.Mockito.mock +import org.ostelco.prime.apierror.ApiError import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.auth.OAuthAuthenticator import org.ostelco.prime.client.api.store.SubscriberDAO import org.ostelco.prime.client.api.util.AccessToken -import org.ostelco.prime.apierror.ApiError import org.ostelco.prime.model.ApplicationToken import java.util.* import javax.ws.rs.client.Client @@ -90,7 +89,7 @@ class ApplicationTokenResourceTest { @JvmField @ClassRule val RULE = ResourceTestRule.builder() - .setMapper(Jackson.newObjectMapper().registerModule(KotlinModule())) + .setMapper(jacksonObjectMapper()) .addResource(AuthDynamicFeature( OAuthCredentialAuthFilter.Builder() .setAuthenticator(AUTHENTICATOR) diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ProductsResourceTest.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ProductsResourceTest.kt index 275e0f43f..3d5f578a5 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ProductsResourceTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ProductsResourceTest.kt @@ -1,12 +1,11 @@ package org.ostelco.prime.client.api.resources import arrow.core.Either -import com.fasterxml.jackson.module.kotlin.KotlinModule +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import com.nhaarman.mockito_kotlin.argumentCaptor import io.dropwizard.auth.AuthDynamicFeature import io.dropwizard.auth.AuthValueFactoryProvider import io.dropwizard.auth.oauth.OAuthCredentialAuthFilter -import io.dropwizard.jackson.Jackson import io.dropwizard.testing.junit.ResourceTestRule import org.assertj.core.api.Assertions.assertThat import org.glassfish.jersey.test.grizzly.GrizzlyWebTestContainerFactory 
@@ -17,11 +16,11 @@ import org.junit.Test import org.mockito.ArgumentMatchers import org.mockito.Mockito.`when` import org.mockito.Mockito.mock +import org.ostelco.prime.apierror.ApiError import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.auth.OAuthAuthenticator import org.ostelco.prime.client.api.store.SubscriberDAO import org.ostelco.prime.client.api.util.AccessToken -import org.ostelco.prime.apierror.ApiError import org.ostelco.prime.model.Price import org.ostelco.prime.model.Product import org.ostelco.prime.paymentprocessor.PaymentProcessor @@ -122,7 +121,7 @@ class ProductsResourceTest { @JvmField @ClassRule val RULE = ResourceTestRule.builder() - .setMapper(Jackson.newObjectMapper().registerModule(KotlinModule())) + .setMapper(jacksonObjectMapper()) .addResource(AuthDynamicFeature( OAuthCredentialAuthFilter.Builder() .setAuthenticator(AUTHENTICATOR) diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ProfileResourceTest.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ProfileResourceTest.kt index d9312ff69..bbc5ad684 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ProfileResourceTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/ProfileResourceTest.kt @@ -1,12 +1,11 @@ package org.ostelco.prime.client.api.resources import arrow.core.Either -import com.fasterxml.jackson.module.kotlin.KotlinModule +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import com.nhaarman.mockito_kotlin.argumentCaptor import io.dropwizard.auth.AuthDynamicFeature import io.dropwizard.auth.AuthValueFactoryProvider import io.dropwizard.auth.oauth.OAuthCredentialAuthFilter -import io.dropwizard.jackson.Jackson import io.dropwizard.testing.junit.ResourceTestRule import org.assertj.core.api.Assertions.assertThat import org.glassfish.jersey.test.grizzly.GrizzlyWebTestContainerFactory @@ -180,7 +179,7 @@ class 
ProfileResourceTest { @JvmField @ClassRule val RULE = ResourceTestRule.builder() - .setMapper(Jackson.newObjectMapper().registerModule(KotlinModule())) + .setMapper(jacksonObjectMapper()) .addResource(AuthDynamicFeature( OAuthCredentialAuthFilter.Builder() .setAuthenticator(AUTHENTICATOR) diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/PurchasesResourceTest.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/PurchasesResourceTest.kt index d16050fe9..5301f2101 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/PurchasesResourceTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/PurchasesResourceTest.kt @@ -1,12 +1,11 @@ package org.ostelco.prime.client.api.resources import arrow.core.Either -import com.fasterxml.jackson.module.kotlin.KotlinModule +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import com.nhaarman.mockito_kotlin.argumentCaptor import io.dropwizard.auth.AuthDynamicFeature import io.dropwizard.auth.AuthValueFactoryProvider import io.dropwizard.auth.oauth.OAuthCredentialAuthFilter.Builder -import io.dropwizard.jackson.Jackson import io.dropwizard.testing.junit.ResourceTestRule import org.assertj.core.api.Assertions import org.glassfish.jersey.test.grizzly.GrizzlyWebTestContainerFactory @@ -15,11 +14,11 @@ import org.junit.ClassRule import org.junit.Test import org.mockito.ArgumentMatchers import org.mockito.Mockito +import org.ostelco.prime.apierror.ApiError import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.auth.OAuthAuthenticator import org.ostelco.prime.client.api.store.SubscriberDAO import org.ostelco.prime.client.api.util.AccessToken -import org.ostelco.prime.apierror.ApiError import org.ostelco.prime.model.Price import org.ostelco.prime.model.Product import org.ostelco.prime.model.PurchaseRecord @@ -79,7 +78,7 @@ class PurchasesResourceTest { @JvmField @ClassRule val RULE = 
ResourceTestRule.builder() - .setMapper(Jackson.newObjectMapper().registerModule(KotlinModule())) + .setMapper(jacksonObjectMapper()) .addResource(AuthDynamicFeature( Builder() .setAuthenticator(AUTHENTICATOR) diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResourceTest.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResourceTest.kt index ad6926288..67b0c66b1 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResourceTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionResourceTest.kt @@ -1,13 +1,11 @@ package org.ostelco.prime.client.api.resources import arrow.core.Either -import com.fasterxml.jackson.databind.ObjectMapper -import com.fasterxml.jackson.module.kotlin.KotlinModule +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import com.nhaarman.mockito_kotlin.argumentCaptor import io.dropwizard.auth.AuthDynamicFeature import io.dropwizard.auth.AuthValueFactoryProvider import io.dropwizard.auth.oauth.OAuthCredentialAuthFilter -import io.dropwizard.jackson.Jackson import io.dropwizard.testing.junit.ResourceTestRule import org.assertj.core.api.Assertions.assertThat import org.glassfish.jersey.test.grizzly.GrizzlyWebTestContainerFactory @@ -85,7 +83,7 @@ class SubscriptionResourceTest { `when`(DAO.getActivePseudonymOfMsisdnForSubscriber(arg.capture())) .thenReturn(Either.right(activePseudonyms)) - val responseJsonString = ObjectMapper().writeValueAsString(activePseudonyms) + val responseJsonString = jacksonObjectMapper().writeValueAsString(activePseudonyms) val resp = RULE.target("/subscription/activePseudonyms") .request() @@ -106,7 +104,7 @@ class SubscriptionResourceTest { @JvmField @ClassRule val RULE: ResourceTestRule = ResourceTestRule.builder() - .setMapper(Jackson.newObjectMapper().registerModule(KotlinModule())) + .setMapper(jacksonObjectMapper()) .addResource(AuthDynamicFeature( 
OAuthCredentialAuthFilter.Builder() .setAuthenticator(AUTHENTICATOR) diff --git a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionsResourceTest.kt b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionsResourceTest.kt index 2abde50a8..61e45d517 100644 --- a/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionsResourceTest.kt +++ b/client-api/src/test/kotlin/org/ostelco/prime/client/api/resources/SubscriptionsResourceTest.kt @@ -1,12 +1,11 @@ package org.ostelco.prime.client.api.resources import arrow.core.Either -import com.fasterxml.jackson.module.kotlin.KotlinModule +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import com.nhaarman.mockito_kotlin.argumentCaptor import io.dropwizard.auth.AuthDynamicFeature import io.dropwizard.auth.AuthValueFactoryProvider import io.dropwizard.auth.oauth.OAuthCredentialAuthFilter -import io.dropwizard.jackson.Jackson import io.dropwizard.testing.junit.ResourceTestRule import org.assertj.core.api.Assertions.assertThat import org.glassfish.jersey.test.grizzly.GrizzlyWebTestContainerFactory @@ -16,11 +15,11 @@ import org.junit.Test import org.mockito.ArgumentMatchers import org.mockito.Mockito.`when` import org.mockito.Mockito.mock +import org.ostelco.prime.apierror.ApiError import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.auth.OAuthAuthenticator import org.ostelco.prime.client.api.store.SubscriberDAO import org.ostelco.prime.client.api.util.AccessToken -import org.ostelco.prime.apierror.ApiError import org.ostelco.prime.model.Subscription import java.util.* import javax.ws.rs.client.Invocation @@ -72,7 +71,7 @@ class SubscriptionsResourceTest { @JvmField @ClassRule val RULE: ResourceTestRule = ResourceTestRule.builder() - .setMapper(Jackson.newObjectMapper().registerModule(KotlinModule())) + .setMapper(jacksonObjectMapper()) .addResource(AuthDynamicFeature( 
OAuthCredentialAuthFilter.Builder() .setAuthenticator(AUTHENTICATOR) diff --git a/firebase-extensions/build.gradle b/firebase-extensions/build.gradle index c39c5cc27..8a46b088a 100644 --- a/firebase-extensions/build.gradle +++ b/firebase-extensions/build.gradle @@ -7,5 +7,4 @@ dependencies { implementation project(":prime-modules") // Match netty via ocs-api api "com.google.firebase:firebase-admin:$firebaseVersion" - api "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" } \ No newline at end of file diff --git a/firebase-extensions/src/main/kotlin/org.ostelco.common.firebasex/FirebaseExtensions.kt b/firebase-extensions/src/main/kotlin/org.ostelco.common.firebasex/FirebaseExtensions.kt index 142f10170..43306213b 100644 --- a/firebase-extensions/src/main/kotlin/org.ostelco.common.firebasex/FirebaseExtensions.kt +++ b/firebase-extensions/src/main/kotlin/org.ostelco.common.firebasex/FirebaseExtensions.kt @@ -1,8 +1,7 @@ package org.ostelco.common.firebasex import com.fasterxml.jackson.core.type.TypeReference -import com.fasterxml.jackson.databind.ObjectMapper -import com.fasterxml.jackson.module.kotlin.registerKotlinModule +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import com.google.auth.oauth2.GoogleCredentials import com.google.firebase.FirebaseOptions.Builder import java.io.File @@ -12,7 +11,7 @@ import java.nio.file.Paths import javax.naming.ConfigurationException private val genericMapType = object : TypeReference>() {} -private val objectMapper = ObjectMapper().registerKotlinModule() +private val objectMapper = jacksonObjectMapper() /** * Extension function added into [com.google.firebase.FirebaseOptions.Builder] which accepts Firebase Credentials diff --git a/neo4j-store/build.gradle b/neo4j-store/build.gradle index a9161fc15..65868d564 100644 --- a/neo4j-store/build.gradle +++ b/neo4j-store/build.gradle @@ -20,7 +20,6 @@ repositories { dependencies { implementation project(":prime-modules") - implementation 
"com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" implementation "com.fasterxml.jackson.core:jackson-databind:$jacksonVersion" implementation "org.neo4j:neo4j-graphdb-api:$neo4jVersion" diff --git a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt index 1dab69abd..aef78f06d 100644 --- a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt +++ b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt @@ -22,6 +22,7 @@ import org.ostelco.prime.paymentprocessor.core.BadGatewayError import org.ostelco.prime.paymentprocessor.core.PaymentError import org.ostelco.prime.paymentprocessor.core.ProductInfo import org.ostelco.prime.storage.GraphStore +import org.ostelco.prime.storage.NotCreatedError import org.ostelco.prime.storage.NotFoundError import org.ostelco.prime.storage.StoreError import org.ostelco.prime.storage.ValidationError @@ -156,7 +157,7 @@ object Neo4jStoreSingleton : GraphStore { getSegmentNameFromCountryCode(subscriber.country), transaction) .mapLeft { storeError -> - if (storeError is NotFoundError && storeError.type == segmentEntity.name) { + if (storeError is NotCreatedError && storeError.type == subscriberToSegmentRelation.relation.name) { ValidationError( type = subscriberEntity.name, id = subscriber.id, diff --git a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Schema.kt b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Schema.kt index 396b28350..63f0366a4 100644 --- a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Schema.kt +++ b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Schema.kt @@ -3,8 +3,7 @@ package org.ostelco.prime.storage.graph import arrow.core.Either import arrow.core.flatMap import com.fasterxml.jackson.core.type.TypeReference -import com.fasterxml.jackson.databind.ObjectMapper -import 
com.fasterxml.jackson.module.kotlin.registerKotlinModule +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import org.neo4j.driver.v1.AccessMode.READ import org.neo4j.driver.v1.AccessMode.WRITE import org.neo4j.driver.v1.StatementResult @@ -198,6 +197,7 @@ class RelationStore(private val relationType: Relation CREATE (from)-[:${relationType.relation.name} { $strProps } ]->(to); """.trimIndent(), transaction) { + // TODO vihang: validate if 'from' and 'to' node exists Either.cond( test = it.summary().counters().relationshipsCreated() == 1, ifTrue = {}, @@ -210,6 +210,7 @@ class RelationStore(private val relationType: Relation CREATE (from)-[:${relationType.relation.name}]->(to); """.trimIndent(), transaction) { + // TODO vihang: validate if 'from' and 'to' node exists Either.cond( test = it.summary().counters().relationshipsCreated() == 1, ifTrue = {}, @@ -221,6 +222,7 @@ class RelationStore(private val relationType: Relation CREATE (from)-[:${relationType.relation.name}]->(to); """.trimIndent(), transaction) { + // TODO vihang: validate if 'from' and 'to' node exists Either.cond( test = it.summary().counters().relationshipsCreated() == 1, ifTrue = {}, @@ -232,6 +234,7 @@ class RelationStore(private val relationType: Relation CREATE (from)-[:${relationType.relation.name}]->(to); """.trimIndent(), transaction) { + // TODO vihang: validate if 'from' and 'to' node exists Either.cond( test = it.summary().counters().relationshipsCreated() == 1, ifTrue = {}, @@ -323,7 +326,7 @@ object ObjectHandler { private const val SEPARATOR = '/' - private val objectMapper = ObjectMapper().registerKotlinModule() + private val objectMapper = jacksonObjectMapper() // // Object to Map diff --git a/neo4j-store/src/test/kotlin/org/ostelco/prime/storage/graph/SchemaTest.kt b/neo4j-store/src/test/kotlin/org/ostelco/prime/storage/graph/SchemaTest.kt index ddc64ff2f..8ebf67f10 100644 --- a/neo4j-store/src/test/kotlin/org/ostelco/prime/storage/graph/SchemaTest.kt +++ 
b/neo4j-store/src/test/kotlin/org/ostelco/prime/storage/graph/SchemaTest.kt @@ -1,7 +1,7 @@ package org.ostelco.prime.storage.graph import com.fasterxml.jackson.core.type.TypeReference -import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import com.palantir.docker.compose.DockerComposeRule import com.palantir.docker.compose.connection.waiting.HealthChecks import org.joda.time.Duration @@ -186,7 +186,7 @@ class SchemaTest { @Test fun `json to map`() { - val objectMapper = ObjectMapper() + val objectMapper = jacksonObjectMapper() val map = objectMapper.readValue>("""{"label":"3GB for 300 NOK"}""", object : TypeReference>() {}) assertEquals("3GB for 300 NOK", map["label"]) } diff --git a/prime-modules/build.gradle b/prime-modules/build.gradle index 43c9bbe4e..559ba42f4 100644 --- a/prime-modules/build.gradle +++ b/prime-modules/build.gradle @@ -8,6 +8,7 @@ dependencies { api "org.jetbrains.kotlin:kotlin-reflect:$kotlinVersion" api "io.jsonwebtoken:jjwt:0.9.1" + api "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" api project(':ocs-grpc-api') api project(':analytics-grpc-api') diff --git a/prime-modules/src/main/kotlin/org/ostelco/prime/apierror/ApiError.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/apierror/ApiError.kt index 05b523976..5f2ef2032 100644 --- a/prime-modules/src/main/kotlin/org/ostelco/prime/apierror/ApiError.kt +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/apierror/ApiError.kt @@ -1,5 +1,7 @@ package org.ostelco.prime.apierror +import org.ostelco.prime.getLogger +import org.ostelco.prime.jsonmapper.asJson import org.ostelco.prime.paymentprocessor.core.PaymentError import org.ostelco.prime.storage.StoreError import javax.ws.rs.core.Response @@ -28,21 +30,30 @@ class NotFoundError(description: String, errorCode: ApiErrorCode, error: Interna override var status : Int = Response.Status.NOT_FOUND.statusCode } -fun mapPaymentErrorToApiError(description: String, 
errorCode: ApiErrorCode, paymentError: PaymentError) : ApiError { - return when(paymentError) { - is org.ostelco.prime.paymentprocessor.core.ForbiddenError -> org.ostelco.prime.apierror.ForbiddenError(description, errorCode, paymentError) - is org.ostelco.prime.paymentprocessor.core.BadGatewayError -> org.ostelco.prime.apierror.BadGatewayError(description, errorCode) - is org.ostelco.prime.paymentprocessor.core.NotFoundError -> org.ostelco.prime.apierror.NotFoundError(description, errorCode, paymentError) +object ApiErrorMapper { + + val logger by getLogger() + + fun mapPaymentErrorToApiError(description: String, errorCode: ApiErrorCode, paymentError: PaymentError) : ApiError { + logger.error("description: $description, errorCode: $errorCode, paymentError: ${asJson(paymentError)}") + return when(paymentError) { + is org.ostelco.prime.paymentprocessor.core.ForbiddenError -> org.ostelco.prime.apierror.ForbiddenError(description, errorCode, paymentError) + // FIXME vihang: remove PaymentError from BadGatewayError + is org.ostelco.prime.paymentprocessor.core.BadGatewayError -> org.ostelco.prime.apierror.BadGatewayError(description, errorCode, paymentError) + is org.ostelco.prime.paymentprocessor.core.NotFoundError -> org.ostelco.prime.apierror.NotFoundError(description, errorCode, paymentError) + } } -} -fun mapStorageErrorToApiError(description: String, errorCode: ApiErrorCode, storeError: StoreError) : ApiError { - return when(storeError) { - is org.ostelco.prime.storage.NotFoundError -> org.ostelco.prime.apierror.NotFoundError(description, errorCode, storeError) - is org.ostelco.prime.storage.AlreadyExistsError -> org.ostelco.prime.apierror.ForbiddenError(description, errorCode, storeError) - is org.ostelco.prime.storage.NotCreatedError -> org.ostelco.prime.apierror.BadGatewayError(description, errorCode) - is org.ostelco.prime.storage.NotUpdatedError -> org.ostelco.prime.apierror.BadGatewayError(description, errorCode) - is org.ostelco.prime.storage.NotDeletedError 
-> org.ostelco.prime.apierror.BadGatewayError(description, errorCode) - is org.ostelco.prime.storage.ValidationError -> org.ostelco.prime.apierror.ForbiddenError(description, errorCode, storeError) + fun mapStorageErrorToApiError(description: String, errorCode: ApiErrorCode, storeError: StoreError) : ApiError { + logger.error("description: $description, errorCode: $errorCode, storeError: ${asJson(storeError)}") + return when(storeError) { + is org.ostelco.prime.storage.NotFoundError -> org.ostelco.prime.apierror.NotFoundError(description, errorCode, storeError) + is org.ostelco.prime.storage.AlreadyExistsError -> org.ostelco.prime.apierror.ForbiddenError(description, errorCode, storeError) + // FIXME vihang: remove StoreError from BadGatewayError + is org.ostelco.prime.storage.NotCreatedError -> org.ostelco.prime.apierror.BadGatewayError(description, errorCode) + is org.ostelco.prime.storage.NotUpdatedError -> org.ostelco.prime.apierror.BadGatewayError(description, errorCode) + is org.ostelco.prime.storage.NotDeletedError -> org.ostelco.prime.apierror.BadGatewayError(description, errorCode) + is org.ostelco.prime.storage.ValidationError -> org.ostelco.prime.apierror.ForbiddenError(description, errorCode, storeError) + } } } \ No newline at end of file diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ResourceHelpers.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/jsonmapper/JsonMapper.kt similarity index 74% rename from client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ResourceHelpers.kt rename to prime-modules/src/main/kotlin/org/ostelco/prime/jsonmapper/JsonMapper.kt index 45678206a..b4f9a331a 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/resources/ResourceHelpers.kt +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/jsonmapper/JsonMapper.kt @@ -1,14 +1,14 @@ -package org.ostelco.prime.client.api.resources +package org.ostelco.prime.jsonmapper import 
com.fasterxml.jackson.core.JsonProcessingException -import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import org.ostelco.prime.getLogger /** * Common 'helper' functions for resources. * */ -val objectMapper = ObjectMapper() +val objectMapper = jacksonObjectMapper() fun R.asJson(`object`: Any): String { try { diff --git a/prime/build.gradle b/prime/build.gradle index 0a7ff1a0d..8226c1456 100644 --- a/prime/build.gradle +++ b/prime/build.gradle @@ -41,7 +41,6 @@ dependencies { runtimeOnly project(':payment-processor') runtimeOnly project(':analytics-module') - implementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" implementation "io.dropwizard:dropwizard-http2:$dropwizardVersion" runtimeOnly "io.dropwizard:dropwizard-json-logging:$dropwizardVersion" implementation "com.google.guava:guava:$guavaVersion" diff --git a/pseudonym-server/build.gradle b/pseudonym-server/build.gradle index a6ceecd35..8d2652abe 100644 --- a/pseudonym-server/build.gradle +++ b/pseudonym-server/build.gradle @@ -19,7 +19,7 @@ dependencies { implementation "com.google.cloud:google-cloud-bigquery:$googleCloudVersion" implementation "com.google.cloud:google-cloud-datastore:$googleCloudVersion" implementation "com.google.cloud:google-cloud-pubsub:$googleCloudVersion" - implementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" + testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" testImplementation "org.jetbrains.kotlin:kotlin-test-junit:$kotlinVersion" From f6f37f0fc47eaff29c42afa038e60abaa6e6dddf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Thu, 4 Oct 2018 15:29:07 +0200 Subject: [PATCH 36/93] More notes --- sample-agent/notes.txt | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/sample-agent/notes.txt b/sample-agent/notes.txt index 7fde16d49..797cf902c 100644 --- a/sample-agent/notes.txt +++ 
b/sample-agent/notes.txt @@ -1,3 +1,24 @@ +INTRODUCTION +==== + + +This document contains two attempts at generating yaml representations +that would be useful to guide us through the tech due-dil demo. + +The steps ahead would be: + + 1. Determine which of these paths to take. + 2. Separate them into actual yaml files that can be executed + using the apply-yaml.sh script + 3. Implement changes in prime so that the changes are actually done. + +The one thing missing in the descriptions below is how to reset the +simulation to its initial state. We'll fix that in a day or two +and then we're basically done. + + + + TAKE ONE ******* From cfeea9e5793c6050f2f678f1614f606e657c24ab Mon Sep 17 00:00:00 2001 From: "Kjell M. Myksvoll" Date: Tue, 2 Oct 2018 10:18:19 +0200 Subject: [PATCH 37/93] Remove dead code --- ...ependencies_get_environment_coordinates.sh | 41 --------- sample-agent/run-export.sh | 89 ------------------- sample-agent/set-gs-names.sh | 20 ----- 3 files changed, 150 deletions(-) delete mode 100644 sample-agent/check_dependencies_get_environment_coordinates.sh delete mode 100755 sample-agent/run-export.sh delete mode 100644 sample-agent/set-gs-names.sh diff --git a/sample-agent/check_dependencies_get_environment_coordinates.sh b/sample-agent/check_dependencies_get_environment_coordinates.sh deleted file mode 100644 index 7e4ff7a18..000000000 --- a/sample-agent/check_dependencies_get_environment_coordinates.sh +++ /dev/null @@ -1,41 +0,0 @@ -#!/bin/bash - -## Intended to be sourced by other programs - - -# -# Check for dependencies -# - -if [[ -z "$DEPENDENCIES ]] ; then - - DEPENDENCIES="gcloud kubectl gsutil" - - for dep in $DEPENDENCIES ; do - if [[ -z $(which $dep) ]] ; then - echo "ERROR: Could not find dependency $dep" - fi - done -fi - -# -# Figure out relevant parts of the environment and check their -# sanity. 
-# - -if [[ -z "$PROJECT_ID" ]] ; then - PROJECT_ID=$(gcloud config get-value project) - - if [[ -z "$PROJECT_ID" ]] ; then - echo "ERROR: Unknown google project ID" - exit 1 - fi -fi - -if [[ -z "$EXPORTER_PODNAME" ]] ; then - EXPORTER_PODNAME=$(kubectl get pods | grep exporter- | awk '{print $1}') - if [[ -z "$EXPORTER_PODNAME" ]] ; then - echo "ERROR: Unknown exporter podname" - exit 1 - fi -fi diff --git a/sample-agent/run-export.sh b/sample-agent/run-export.sh deleted file mode 100755 index b7fb26826..000000000 --- a/sample-agent/run-export.sh +++ /dev/null @@ -1,89 +0,0 @@ -#!/bin/bash - -## -## Run an export, return the identifier for the export, put the -## files from the export in a directory denoted as the single -## command line parameter. -## - - - -# Absolute path to this script, e.g. /home/user/bin/foo.sh -SCRIPT=$(readlink -f "$0") -# Absolute path this script is in, thus /home/user/bin -SCRIPTPATH=$(dirname "$SCRIPT") -echo $SCRIPTPATH - - -# -# Get command line parameter, which should be an existing -# directory in which to store the results -# - -TARGET_DIR=$1 -if [[ -z "$TARGET_DIR" ]] ; then - echo "$0 Missing parameter" - echo "usage $0 target-dir" - exit 1 -fi - -if [[ ! -d "$TARGET_DIR" ]] ; then - echo "$0 parameter does not designate an existing directory" - echo "usage $0 target-dir" - exit 1 -fi - -$SCRIPTPATH/check_dependencies_get_environment_coordinates.sh - -# -# Run an export inside the kubernetes cluster, then parse -# the output of the script thar ran the export -# -#TEMPFILE="$(mktemp /tmp/abc-script.XXXXXX)" -TEMPFILE="tmpfile.txt" - -kubectl exec -it "${EXPORTER_PODNAME}" -- /bin/bash -c /export_data.sh > "$TEMPFILE" - -# Fail if the exec failed -retVal=$? -if [ $retVal -ne 0 ]; then - echo "ERROR: Failed to export data:" - cat $TMPFILE - rm $TMPFILE - exit 1 -fi - -# -# Parse the output of the tmpfile, getting the export ID, and -# the google filestore URLs for the output files. 
-# - - -EXPORT_ID=$(grep "Starting export job for" $TEMPFILE | awk '{print $5}' | sed 's/\r$//' ) - -PURCHASES_GS="gs://${PROJECT_ID}-dataconsumption-export/$EXPORT_ID-purchases.csv" -SUB_2_MSISSDN_MAPPING_GS="gs://${PROJECT_ID}-dataconsumption-export/$EXPORT_ID-sub2msisdn.csv" -CONSUMPTION_GS="gs://${PROJECT_ID}-dataconsumption-export/$EXPORT_ID.csv" - -# -# Then copy the CSV files to local storage (current directory) -# - -gsutil cp $PURCHASES_GS $TARGET_DIR -gsutil cp $SUB_2_MSISSDN_MAPPING_GS $TARGET_DIR -gsutil cp $CONSUMPTION_GS $TARGET_DIR - -# -# Clean up the tempfile -# - -rm "$TEMPFILE" - -# -# Finally output the ID of the export, since that's -# what will be used by users of this script to access -# the output -# - -echo $EXPORT_ID -exit 0 diff --git a/sample-agent/set-gs-names.sh b/sample-agent/set-gs-names.sh deleted file mode 100644 index 34075a239..000000000 --- a/sample-agent/set-gs-names.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash - - -if [[ -z "$PROJECT_ID" ]] ; then - echo "$0 PROJECT_ID variable not set, cannot determine google filestore coordinates" - exit 1 -fi - - -if [[ -z "$EXPORT_ID" ]] ; then - echo "$0 EXPORT_ID variable not set, cannot determine google filestore coordinates" - exit 1 -fi - - -PURCHASES_GS="gs://${PROJECT_ID}-dataconsumption-export/$EXPORT_ID-purchases.csv" -SUB_2_MSISSDN_MAPPING_GS="gs://${PROJECT_ID}-dataconsumption-export/$EXPORT_ID-sub2msisdn.csv" -CONSUMPTION_GS="gs://${PROJECT_ID}-dataconsumption-export/$EXPORT_ID.csv" -RESULT_SEGMENT_PSEUDO_GS="gs://${PROJECT_ID}-dataconsumption-export/${EXPORT_ID}-resultsegment-pseudoanonymized.csv" -RESULT_SEGMENT_CLEAR_GS="gs://${PROJECT_ID}-dataconsumption-export/${EXPORT_ID}-resultsegment-cleartext.csv" From b4a50eb419617060950424eea99577e265736c6f Mon Sep 17 00:00:00 2001 From: "Kjell M. 
Myksvoll" Date: Tue, 2 Oct 2018 10:18:58 +0200 Subject: [PATCH 38/93] Setting the executable bit on the sample agent --- sample-agent/sample-agent.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 sample-agent/sample-agent.sh diff --git a/sample-agent/sample-agent.sh b/sample-agent/sample-agent.sh old mode 100644 new mode 100755 From 51a14b657f18f39d54c1547616e65b2817dcf611 Mon Sep 17 00:00:00 2001 From: "Kjell M. Myksvoll" Date: Tue, 2 Oct 2018 11:08:28 +0200 Subject: [PATCH 39/93] More working version, upload not tested yet --- sample-agent/sample-agent.sh | 43 +++++++++++++++++++----------------- 1 file changed, 23 insertions(+), 20 deletions(-) diff --git a/sample-agent/sample-agent.sh b/sample-agent/sample-agent.sh index c2f683400..f71940abb 100755 --- a/sample-agent/sample-agent.sh +++ b/sample-agent/sample-agent.sh @@ -1,10 +1,10 @@ -#!/bin/bash +#!/bin/bash -x set -e ### ### VALIDATING AND PARSING COMMAND LINE PARAMETERS -### +### # # Get command line parameter, which should be an existing @@ -94,6 +94,7 @@ echo "$0: kubectl port-forward $PRIME_PODNAME 8080:8080" function runScriptOnExporterPod { if [[ $# -ne 2 ]] ; then echo "$0 ERROR: runScriptOnExporterPod requires exactly two parameters" + die fi local scriptname=$1 local intentDescription=$2 @@ -104,9 +105,9 @@ function runScriptOnExporterPod { # level process's lifetime, I'll do it this way. TEMPFILE="tmpfile.txt" [[ -f "$TMPFILE" ]] && rm "$TMPFILE" - + kubectl exec -it "${EXPORTER_PODNAME}" -- /bin/bash -c "$scriptname" > "$TEMPFILE" - + # Fail if the exec failed retVal=$? if [[ $retVal -ne 0 ]]; then @@ -132,9 +133,9 @@ function exportDataFromExporterPod { echo "$0 ERROR: Running the runScriptOnExporterPod failed to return the name of a resultfile." 
die fi - + local exportId="$(grep "Starting export job for" $tmpfilename | awk '{print $5}' | sed 's/\r$//' )" - + if [[ -z "$exportId" ]] ; then echo "$0 Could not get export batch from exporter pod" fi @@ -143,14 +144,18 @@ function exportDataFromExporterPod { } function mapPseudosToUserids { - local tmpfile="$(runScriptOnExporterPod /map_subscribers.sh "mapping pseudoids to subscriber ids")" - [[ -f "$tmpfile" ]] && rm "$tmpfile" + # XXX TODO: Test correct number of parameters + local exportid=$1 + local tmpfile="$(runScriptOnExporterPod "/map_subscribers.sh $exportid" "mapping pseudoids to subscriber ids")" + ## [[ -f "$tmpfile" ]] && rm "$tmpfile" + echo "LOG FROM MAPPING IS:" + cat $tmpfile } # # Generate the Google filesystem names of components associated with # a particular export ID: Typical usage -# +# # PURCHASES_GS="$(gsExportCsvFilename "ab234245cvsr" "purchases")" function gsExportCsvFilename { @@ -158,7 +163,7 @@ function gsExportCsvFilename { echo "$0 ERROR: gsExportCsvFilename requires exactly two parameters, got '$@'" die fi - + local exportId=$1 local componentName=$2 if [[ -z "$exportId" ]] ; then @@ -168,13 +173,13 @@ function gsExportCsvFilename { if [[ -n "$componentName" ]] ; then componentName="-$componentName" fi - + echo "gs://${PROJECT_ID}-dataconsumption-export/${exportId}${componentName}.csv" } # -# Generate a filename +# Generate a filename # function importedCsvFilename { if [[ $# -ne 3 ]] ; then @@ -199,14 +204,14 @@ function importedCsvFilename { if [[ -n "$componentName" ]] ; then componentName="-$componentName" fi - + echo "${importDirectory}/${exportId}${componentName}.csv" } ### ### MAIN SCRIPT -### +### @@ -226,12 +231,12 @@ for component in "purchases" "sub2msisdn" "" ; do if [[ -z "$source" ]] ; then echo "$0 ERROR: Could not determine source file for export component '$component'" fi - + destination="$(importedCsvFilename "$EXPORT_ID" "$TARGET_DIR" "$component")" if [[ -z "$destination" ]] ; then echo "$0 ERROR: Could not 
determine destination file for export component '$component'" fi - + gsutil cp "$source" "$destination" done @@ -267,7 +272,7 @@ gsutil cp $SEGMENT_TMPFILE_PSEUDO $RESULT_SEGMENT_PSEUDO_GS # Then run the script that will convert it into a none-anonymized # file and fetch the results from gs:/ -mapPseudosToUserids +mapPseudosToUserids "$EXPORT_ID" gsutil cp "$RESULT_SEGMENT_CLEAR_GS" "$RESULT_SEGMENT_CLEAR" @@ -303,7 +308,7 @@ EOF # Adding the list of subscribers in clear text (indented six spaces # with a leading "-" as per YAML list syntax. -awk '{print " - " $1}' $RESULT_SEGMENT_SINGLE_COLUMN >> $IMPORTFILE_YML +awk '{print " - " $1}' $RESULT_SEGMENT_SINGLE_COLUMN >> $IMPORTFILE_YML ## ## Send it to the importer @@ -318,5 +323,3 @@ curl --data-binary @$IMPORTFILE_YML $IMPORTER_URL ## # .... eventually - - From 72d87f8a4c400e1b4970e8eaf816afc375bbcc9a Mon Sep 17 00:00:00 2001 From: "Kjell M. Myksvoll" Date: Tue, 2 Oct 2018 11:15:44 +0200 Subject: [PATCH 40/93] Set content type when uploading --- sample-agent/sample-agent.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sample-agent/sample-agent.sh b/sample-agent/sample-agent.sh index f71940abb..e777ff50a 100755 --- a/sample-agent/sample-agent.sh +++ b/sample-agent/sample-agent.sh @@ -315,7 +315,7 @@ awk '{print " - " $1}' $RESULT_SEGMENT_SINGLE_COLUMN >> $IMPORTFILE_YML ## (assuming the kubectl port forwarding is enabled) IMPORTER_URL=http://127.0.0.1:8080/importer -curl --data-binary @$IMPORTFILE_YML $IMPORTER_URL +curl -H "Content-type: text/vnd.yaml" --data-binary @$IMPORTFILE_YML $IMPORTER_URL ## From 21440f9bc29b3e294c3b952472a0298221e7d93c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Tue, 2 Oct 2018 13:11:42 +0200 Subject: [PATCH 41/93] Update mapping script to use a a tst on input prameters, log more clearly, and to have more comments --- exporter/script/idle.sh | 4 +- exporter/script/map_subscribers.sh | 70 ++++++++++++++++++++++++++---- 
sample-agent/sample-agent.sh | 2 +- 3 files changed, 64 insertions(+), 12 deletions(-) diff --git a/exporter/script/idle.sh b/exporter/script/idle.sh index 93c81b854..ed2e64763 100644 --- a/exporter/script/idle.sh +++ b/exporter/script/idle.sh @@ -11,8 +11,8 @@ cleanup () trap cleanup SIGINT SIGTERM -while [ 1 ] +while [[ 1 ]] do sleep 60 & wait $! -done \ No newline at end of file +done diff --git a/exporter/script/map_subscribers.sh b/exporter/script/map_subscribers.sh index ac1a1a18c..7d7d520f6 100644 --- a/exporter/script/map_subscribers.sh +++ b/exporter/script/map_subscribers.sh @@ -1,11 +1,51 @@ #!/bin/bash -#set -x + + +## +## Map a list of pseudo-anonymized subscriber IDs into clear text +## subscriber identifiers. +## +## Takes a single parameter, the exportID, so usage is: +## +## ./map_subscribers.sh 8972789sd897987rwefsa9879 +## +## Based on the command line parameter, an input file is imported from +## the file storage. The input is a file named +## +## gs://$projectId-dataconsumption-export/${exportId}-resultsegment-pseudoanonymized.csv$exportId/ +## +## This input file contains a single column, containing pseudoanonymized +## subscriber identifiers. +## +## The script proeduces a single output in the file: +## +## gs://$projectId-dataconsumption-export/${exportId}-resultsegment-cleartext.csv +## +## It contains two columns, with headers, containing pseudo IDs, and the corresponding +## clear text subscriber ID. +## +## + + +## +## Check input parameters +## + +if [[ $# -ne 1 ]] ; then + echo "$0 ERROR: Requires one command line parameter dentifying the export ID" + exit 1 +fi exportId=$1 -if [ -z "$1" ]; then - echo "To convert subscribers, specify the id of the export operation" - exit +if [[ -z "$1" ]]; then + echo "$0 ERROR: To convert subscribers, specify the id of the export operation" + exit 1 fi + +## +## Calculate locations of things to use. 
+## + exportId=${exportId//-} exportId=${exportId,,} # Set the projectId @@ -21,12 +61,19 @@ inputSubscriberTable=exported_pseudonyms.${exportId}_pseudo_subscriber subscriberPseudonymsTable=exported_pseudonyms.${exportId}_subscriber outputSubscriberTable=exported_pseudonyms.${exportId}_clear_subscriber +## +## Import the from the csv file. +## -echo "Importing data from csv $csvfile" +echo "$0: INFO Importing data from csv $csvfile" bq --location=EU load --replace --source_format=CSV $projectId:$inputSubscriberTable gs://$csvfile /subscriber-schema.json echo "Exported data to $inputSubscriberTable" -echo "Creating table $outputSubscriberTable" + +## +## Calculate the translation table +## +echo "$0: INFO Creating table $outputSubscriberTable" # SQL for joining pseudonym & hourly consumption tables. read -r -d '' sqlForJoin << EOM CREATE TEMP FUNCTION URLDECODE(url STRING) AS (( @@ -48,8 +95,13 @@ EOM # Run the query using bq & dump results to the new table bq --location=EU --format=none query --destination_table $outputSubscriberTable --replace --use_legacy_sql=false $sqlForJoin -echo "Created table $outputSubscriberTable" +echo "$0 INFO: Created table $outputSubscriberTable" + + +## +## Export data to the outut CSV file +## -echo "Exporting data to csv $outputCsvfile" +echo "$0 INFO: Exporting data to csv $outputCsvfile" bq --location=EU extract --destination_format=CSV $outputSubscriberTable gs://$outputCsvfile -echo "Exported data to gs://$outputCsvfile" +echo "$0 INFO: Exported data to gs://$outputCsvfile" diff --git a/sample-agent/sample-agent.sh b/sample-agent/sample-agent.sh index e777ff50a..d770fb2e7 100755 --- a/sample-agent/sample-agent.sh +++ b/sample-agent/sample-agent.sh @@ -144,7 +144,7 @@ function exportDataFromExporterPod { } function mapPseudosToUserids { - # XXX TODO: Test correct number of parameters + # XXX TODO: Test correct number of parameters local exportid=$1 local tmpfile="$(runScriptOnExporterPod "/map_subscribers.sh $exportid" "mapping 
pseudoids to subscriber ids")" ## [[ -f "$tmpfile" ]] && rm "$tmpfile" From 7314d58ef1d86bcd3929a83d278f94c436f9665f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Wed, 3 Oct 2018 15:54:07 +0200 Subject: [PATCH 42/93] Adding notes and suggestions --- .../legacy/sample-offer-legacy.yaml | 77 +++++++++ .../demo-yamls/legacy/sample-offer-only.yaml | 12 ++ .../sample-offer-products-segments.yaml | 35 +++++ sample-agent/demo-yamls/notes.txt | 148 ++++++++++++++++++ sample-agent/demo-yamls/script1.yaml | 1 + 5 files changed, 273 insertions(+) create mode 100644 sample-agent/demo-yamls/legacy/sample-offer-legacy.yaml create mode 100644 sample-agent/demo-yamls/legacy/sample-offer-only.yaml create mode 100644 sample-agent/demo-yamls/legacy/sample-offer-products-segments.yaml create mode 100644 sample-agent/demo-yamls/notes.txt create mode 100644 sample-agent/demo-yamls/script1.yaml diff --git a/sample-agent/demo-yamls/legacy/sample-offer-legacy.yaml b/sample-agent/demo-yamls/legacy/sample-offer-legacy.yaml new file mode 100644 index 000000000..2a6f38ebe --- /dev/null +++ b/sample-agent/demo-yamls/legacy/sample-offer-legacy.yaml @@ -0,0 +1,77 @@ +# +# This is a sample YAML format to be used by +# agents that produce offers. The general idea +# is that an offer has a set of parameters, +# and also a set of selected subscribers that will +# get it. +# +# YAML was chosen since it's more human readable than +# e.g. json or protobuffers, while still being +# easy to produce by an agent, and relatively compact, +# in particular when gzipped. +# + +producingAgent: + name: Simple agent + version: 1.0 + +# # All of the parameters below are just copied from the firebasr +# # realtime database we used in the demo, converted to +# # camel case. All the fields should be documented +# # in this document, and we should think through if this is +# # the best set of parameters we went. 
+ +offer: + # XXX This offer does not have an ID, but if we were just + # updating the list of members of the segment, it would + # make sense to have an OfferID, or something that refers + # to a previously created offer. That id should be created + # by the importer, and used by the agent when updating + # membership. If any other parameters are going to be + # changed, it is necessary to produce a new offer. + # It may make sense to put the ID in the url when + # when we update (PUT method) changes, but then the + # ID in the yaml will be redundant. Figure out how to + # do this one way or another and just do it. + visibility: + from: "2018-02-22T12:41:49.871Z" + to: "2018-02-22T12:41:49.871Z" + presentation: + badgeLabel: "mbop" + description: "Best offer you will get today" + shortDescription: "Best offer!" + label: "3 GB" + name: "3 GB" + priceLabel: "49 NOK" + hidden: false + imageUrl: https://www.ft-associates.com/wp-content/uploads/2015/08/Best-Offer.jpg + financial: + repurchability: 1 + currencyLabel: "NOK" + price: 4900 + taxRate: 10.0 + product: + sku: 2 + # A possibly very long list of product parameters that are all + # dependent on the SKU's requirement. Details ignored here, + # that may pop up later. Deal with them then. + noOfBytes: 3000000000 + +# # We put the segment last, since it may have a long list of +# # members in it. We want that list to be last, since it contains +# # little information that humans are interested in, and we want +# # humans to start reading the file at the top. + +segment: + type: agent-specific-segment + description: "This is how this segment should be described" + members: + # The decryption key is what the de-anonymizer will use to + # make proper identifiers out of the members listed below. + # The special purpose key "none" indicatest that the member list + # is in clear text. 
+ decryptionKey: none + members: + - 4790300157 + - 4790300144 + - 4333333333 diff --git a/sample-agent/demo-yamls/legacy/sample-offer-only.yaml b/sample-agent/demo-yamls/legacy/sample-offer-only.yaml new file mode 100644 index 000000000..5e11702f8 --- /dev/null +++ b/sample-agent/demo-yamls/legacy/sample-offer-only.yaml @@ -0,0 +1,12 @@ +producingAgent: + name: Simple agent + version: 1.0 + +offer: + id: test-offer + # use existing product + products: + - 1GB_249NOK + # use existing segment + segments: + - test-segment diff --git a/sample-agent/demo-yamls/legacy/sample-offer-products-segments.yaml b/sample-agent/demo-yamls/legacy/sample-offer-products-segments.yaml new file mode 100644 index 000000000..b1a6e2809 --- /dev/null +++ b/sample-agent/demo-yamls/legacy/sample-offer-products-segments.yaml @@ -0,0 +1,35 @@ +producingAgent: + name: Simple agent + version: 1.0 + +offer: + id: test-offer + + # list of existing products + # listing products to be created in this yaml is OPTIONAL + +# products: +# - 1GB_249NOK + + # list of existing segments + # listing segments to be created in this yaml is OPTIONAL + +# segments: +# - test-segment + +# These products will be created and linked to offer - 'test-offer' +products: + - sku: 1GB_249NOK + price: + amount: 249 + currency: NOK + properties: + noOfBytes: 1_000_000_000 + presentation: + isDefault: true + offerLabel: Default Offer + priceLabel: 249 NOK + +# These segments will be created and linked to offer - 'test-offer' +segments: + - id: test-segment diff --git a/sample-agent/demo-yamls/notes.txt b/sample-agent/demo-yamls/notes.txt new file mode 100644 index 000000000..7fde16d49 --- /dev/null +++ b/sample-agent/demo-yamls/notes.txt @@ -0,0 +1,148 @@ +TAKE ONE +******* + + +====: Script 1 (initialization, may or not actually be run, but should represent the situation the later scripts operate on) + +createProducts: + - sku: 1GB_200NOK + price: + amount: 200 + currency: NOK + properties: + productClass: SIMPLE_DATA + 
noOfBytes: 1_000_000_000 + presentation: + productClass: SIMPLE_PRESENTATION + isDefault: true + offerLabel: Top Up + priceLabel: 200 NOK + + - sku: 2GB_200NOK + price: + amount: 200 + currency: NOK + properties: + noOfBytes: 2_000_000_000 + presentation: + isDefault: true + offerLabel: Top Up + priceLabel: 200 NOK + + - sku: 1GB_50NOK + price: + amount: 50 + currency: NOK + properties: + noOfBytes: 1_000_000_000 + presentation: + isDefault: true + offerLabel: Special offer + priceLabel: 50 NOK + +createSegments: + - id: onlySignedUp_Demo + - id: hasUsedLotsOfData_Demo + - id: hasUsedEvenMoreData_Demo + +createOffers: + - id: initialOffer + segments: + - onlySignedUp_Demo + products: + - 1GB_200NOK + - id: offerForBigDataUsers + segments: + - onlySignedUp_Demo + products: + - 2GB_200NOK + - id: specialOffer + segments: + - hasUsedEvenMoreData_Demo + products: + - 1GB_50NOK + +moveToSegment: + source: any + target: onlySignedUp_Demo + subscribers: + - foo@baz.com + - bar@baz.com + + +==== Script 2: Promote SH from onlySignedUp_Demo to offerForBigDataUser segment +moveToSegment: + source: onlySignedUp_Demo + target: hasUsedLotsOfData_Demo + subscribers: + - foo@baz.com + + +==== Script 3: Promote SH from onlySignedUp_Demo to offerForBigDataUser segment + +moveToSegment: + source: hasUsedLotsOfData_Demo + target: hasUsedEvenMoreData_Demo + subscribers: + - foo@baz.com + + +TAKE 2 +****** + +== script 1 (initialization, may or not actually be run, but should represent the situation the later scripts operate on) +createOffer: + id: demoOffer1 + createProducts: + - sku: 1GB_200NOK + price: + amount: 200 + currency: NOK + properties: + noOfBytes: 1_000_000_000 + presentation: + isDefault: true + offerLabel: Top Up + priceLabel: 200 NOK + createSegments: + - id: demoSegment1 + subscribers: + - foo@baz.com + - bar@baz.com + +== script 2 +createOffer: + id: demoOffer2 + createProducts: + - sku: 2GB_200NOK + price: + amount: 200 + currency: NOK + properties: + noOfBytes: 
2_000_000_000 + presentation: + isDefault: true + offerLabel: Top Up + priceLabel: 200 NOK + createSegments: + - id: demoSegment2 + + +== script 3 +createOffer: + id: demoOffer3 + createProducts: + - sku: 1GB_50NOK + price: + amount: 50 + currency: NOK + properties: + noOfBytes: 1_000_000_000 + presentation: + isDefault: true + offerLabel: Special offer + priceLabel: 50 NOK + createSegments: + - id: demoSegment3 + subscribers: + - bar@baz.com diff --git a/sample-agent/demo-yamls/script1.yaml b/sample-agent/demo-yamls/script1.yaml new file mode 100644 index 000000000..27e2e5bb9 --- /dev/null +++ b/sample-agent/demo-yamls/script1.yaml @@ -0,0 +1 @@ +createOffer: From 37e80c14689ca03d89b098db210bec7d329e73b5 Mon Sep 17 00:00:00 2001 From: "Kjell M. Myksvoll" Date: Thu, 4 Oct 2018 08:06:00 +0200 Subject: [PATCH 43/93] Minor whitespace fixes --- .../ostelco/prime/admin/importer/ImportProcessor.kt | 3 ++- sample-agent/sample-agent.sh | 11 +++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/ImportProcessor.kt b/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/ImportProcessor.kt index 3f1c653ee..6c3af7a09 100644 --- a/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/ImportProcessor.kt +++ b/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/ImportProcessor.kt @@ -1,3 +1,4 @@ + package org.ostelco.prime.admin.importer import arrow.core.Either @@ -23,4 +24,4 @@ class ImportAdapter : ImportProcessor { segments = importDeclaration.segments) .mapLeft { BadRequestError(it.message, ApiErrorCode.FAILED_TO_IMPORT_OFFER) } } -} \ No newline at end of file +} diff --git a/sample-agent/sample-agent.sh b/sample-agent/sample-agent.sh index d770fb2e7..6f01a2d3b 100755 --- a/sample-agent/sample-agent.sh +++ b/sample-agent/sample-agent.sh @@ -1,4 +1,4 @@ -#!/bin/bash -x +#!/bin/bash set -e @@ -34,7 +34,7 @@ trap "exit 1" TERM export TOP_PID=$$ function die() { - kill -s TERM 
$TOP_PID + kill -s TERM $TOP_PID } # @@ -44,9 +44,9 @@ function die() { DEPENDENCIES="gcloud kubectl gsutil" for dep in $DEPENDENCIES ; do - if [[ -z $(which $dep) ]] ; then - echo "ERROR: Could not find dependency $dep" - fi + if [[ -z $(which $dep) ]] ; then + echo "ERROR: Could not find dependency $dep" + fi done # @@ -293,7 +293,6 @@ cat > $IMPORTFILE_YML < Date: Thu, 4 Oct 2018 10:03:38 +0200 Subject: [PATCH 44/93] Script to apply a yaml from somewhere --- sample-agent/demo-yamls/apply-yaml.sh | 87 +++++++++++++++++++++++++++ sample-agent/demo-yamls/script1.yaml | 1 - 2 files changed, 87 insertions(+), 1 deletion(-) create mode 100755 sample-agent/demo-yamls/apply-yaml.sh delete mode 100644 sample-agent/demo-yamls/script1.yaml diff --git a/sample-agent/demo-yamls/apply-yaml.sh b/sample-agent/demo-yamls/apply-yaml.sh new file mode 100755 index 000000000..0536ff0f8 --- /dev/null +++ b/sample-agent/demo-yamls/apply-yaml.sh @@ -0,0 +1,87 @@ +#!/bin/bash -x + +set -e + +### +### SEND PRE_WRITTEN YAML SCRIPT TO THE IMPORTER. +### + +# +# Get command line parameter, which should be an existing +# file containing a yaml file. +# + +YAML_SCRIPTNAME=$1 +if [[ -z "$YAML_SCRIPTNAME" ]] ; then + echo "$0 Missing script" + echo "usage $0 yaml-script" + exit 1 +fi + +if [[ ! -f "$YAML_SCRIPTNAME" ]] ; then + echo "$0 $YAML_SCRIPTNAME does not exist or is not a file" + echo "usage $0 yaml-script" + exit 1 +fi + +### +### PRELIMINARIES +### + +# Be able to die from inside procedures + +# +# Check for dependencies being satisfied +# + +DEPENDENCIES="gcloud kubectl gsutil" + +for dep in $DEPENDENCIES ; do + if [[ -z $(which $dep) ]] ; then + echo "ERROR: Could not find dependency $dep" + fi +done + +# +# Figure out relevant parts of the environment and check their +# sanity. 
+# + +PROJECT_ID=$(gcloud config get-value project) + +if [[ -z "$PROJECT_ID" ]] ; then + echo "ERROR: Unknown google project ID" + exit 1 +fi + +PRIME_PODNAME=$(kubectl get pods | grep prime- | awk '{print $1}') +if [[ -z "$PRIME_PODNAME" ]] ; then + echo "ERROR: Unknown prime podname" + exit 1 +fi + + +## +## Checking the assumption that localhost forwarding is actually +## working. +## + +EXPECTED_FROM_GET_TO_IMPORT='{"code":405,"message":"HTTP 405 Method Not Allowed"}' +RESULT_FROM_GET_PROBE="$(curl http://127.0.0.1:8080/importer 2>/dev/null)" + +if [[ "$EXPECTED_FROM_GET_TO_IMPORT" != "$RESULT_FROM_GET_PROBE" ]] ; then + echo "$0 ERROR: Did not get expected result when probing importer, bailing out" + echo "$0: ERROR: Assuming that prime is running at $PRIME_PODNAME" + echo "$0: ERROR: and that you have done" + echo "$0: ERRIR: kubectl port-forward $PRIME_PODNAME 8080:8080" + echo "$0: ERROR: Please check if this is working" + exit 1 +fi + + +## +## Send it to the importer +## (assuming the kubectl port forwarding is enabled) + +IMPORTER_URL=http://127.0.0.1:8080/importer +curl -H "Content-type: text/vnd.yaml" --data-binary @$YAML_SCRIPTNAME $IMPORTER_URL diff --git a/sample-agent/demo-yamls/script1.yaml b/sample-agent/demo-yamls/script1.yaml deleted file mode 100644 index 27e2e5bb9..000000000 --- a/sample-agent/demo-yamls/script1.yaml +++ /dev/null @@ -1 +0,0 @@ -createOffer: From 8a6543155c592c4ff1e44e196f688d23dde9d1c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Thu, 4 Oct 2018 10:27:07 +0200 Subject: [PATCH 45/93] Moving everything to top level --- sample-agent/{demo-yamls => }/apply-yaml.sh | 0 .../legacy/sample-offer-legacy.yaml | 77 ------------------- .../demo-yamls/legacy/sample-offer-only.yaml | 12 --- .../sample-offer-products-segments.yaml | 35 --------- sample-agent/{demo-yamls => }/notes.txt | 0 5 files changed, 124 deletions(-) rename sample-agent/{demo-yamls => }/apply-yaml.sh (100%) delete mode 100644 
sample-agent/demo-yamls/legacy/sample-offer-legacy.yaml delete mode 100644 sample-agent/demo-yamls/legacy/sample-offer-only.yaml delete mode 100644 sample-agent/demo-yamls/legacy/sample-offer-products-segments.yaml rename sample-agent/{demo-yamls => }/notes.txt (100%) diff --git a/sample-agent/demo-yamls/apply-yaml.sh b/sample-agent/apply-yaml.sh similarity index 100% rename from sample-agent/demo-yamls/apply-yaml.sh rename to sample-agent/apply-yaml.sh diff --git a/sample-agent/demo-yamls/legacy/sample-offer-legacy.yaml b/sample-agent/demo-yamls/legacy/sample-offer-legacy.yaml deleted file mode 100644 index 2a6f38ebe..000000000 --- a/sample-agent/demo-yamls/legacy/sample-offer-legacy.yaml +++ /dev/null @@ -1,77 +0,0 @@ -# -# This is a sample YAML format to be used by -# agents that produce offers. The general idea -# is that an offer has a set of parameters, -# and also a set of selected subscribers that will -# get it. -# -# YAML was chosen since it's more human readable than -# e.g. json or protobuffers, while still being -# easy to produce by an agent, and relatively compact, -# in particular when gzipped. -# - -producingAgent: - name: Simple agent - version: 1.0 - -# # All of the parameters below are just copied from the firebasr -# # realtime database we used in the demo, converted to -# # camel case. All the fields should be documented -# # in this document, and we should think through if this is -# # the best set of parameters we went. - -offer: - # XXX This offer does not have an ID, but if we were just - # updating the list of members of the segment, it would - # make sense to have an OfferID, or something that refers - # to a previously created offer. That id should be created - # by the importer, and used by the agent when updating - # membership. If any other parameters are going to be - # changed, it is necessary to produce a new offer. 
- # It may make sense to put the ID in the url when - # when we update (PUT method) changes, but then the - # ID in the yaml will be redundant. Figure out how to - # do this one way or another and just do it. - visibility: - from: "2018-02-22T12:41:49.871Z" - to: "2018-02-22T12:41:49.871Z" - presentation: - badgeLabel: "mbop" - description: "Best offer you will get today" - shortDescription: "Best offer!" - label: "3 GB" - name: "3 GB" - priceLabel: "49 NOK" - hidden: false - imageUrl: https://www.ft-associates.com/wp-content/uploads/2015/08/Best-Offer.jpg - financial: - repurchability: 1 - currencyLabel: "NOK" - price: 4900 - taxRate: 10.0 - product: - sku: 2 - # A possibly very long list of product parameters that are all - # dependent on the SKU's requirement. Details ignored here, - # that may pop up later. Deal with them then. - noOfBytes: 3000000000 - -# # We put the segment last, since it may have a long list of -# # members in it. We want that list to be last, since it contains -# # little information that humans are interested in, and we want -# # humans to start reading the file at the top. - -segment: - type: agent-specific-segment - description: "This is how this segment should be described" - members: - # The decryption key is what the de-anonymizer will use to - # make proper identifiers out of the members listed below. - # The special purpose key "none" indicatest that the member list - # is in clear text. 
- decryptionKey: none - members: - - 4790300157 - - 4790300144 - - 4333333333 diff --git a/sample-agent/demo-yamls/legacy/sample-offer-only.yaml b/sample-agent/demo-yamls/legacy/sample-offer-only.yaml deleted file mode 100644 index 5e11702f8..000000000 --- a/sample-agent/demo-yamls/legacy/sample-offer-only.yaml +++ /dev/null @@ -1,12 +0,0 @@ -producingAgent: - name: Simple agent - version: 1.0 - -offer: - id: test-offer - # use existing product - products: - - 1GB_249NOK - # use existing segment - segments: - - test-segment diff --git a/sample-agent/demo-yamls/legacy/sample-offer-products-segments.yaml b/sample-agent/demo-yamls/legacy/sample-offer-products-segments.yaml deleted file mode 100644 index b1a6e2809..000000000 --- a/sample-agent/demo-yamls/legacy/sample-offer-products-segments.yaml +++ /dev/null @@ -1,35 +0,0 @@ -producingAgent: - name: Simple agent - version: 1.0 - -offer: - id: test-offer - - # list of existing products - # listing products to be created in this yaml is OPTIONAL - -# products: -# - 1GB_249NOK - - # list of existing segments - # listing segments to be created in this yaml is OPTIONAL - -# segments: -# - test-segment - -# These products will be created and linked to offer - 'test-offer' -products: - - sku: 1GB_249NOK - price: - amount: 249 - currency: NOK - properties: - noOfBytes: 1_000_000_000 - presentation: - isDefault: true - offerLabel: Default Offer - priceLabel: 249 NOK - -# These segments will be created and linked to offer - 'test-offer' -segments: - - id: test-segment diff --git a/sample-agent/demo-yamls/notes.txt b/sample-agent/notes.txt similarity index 100% rename from sample-agent/demo-yamls/notes.txt rename to sample-agent/notes.txt From d88038679bbe5868ac4bd1c2ffc54982f612d715 Mon Sep 17 00:00:00 2001 From: "Kjell M. 
Myksvoll" Date: Thu, 4 Oct 2018 10:31:26 +0200 Subject: [PATCH 46/93] Fixes bug causing empty list with subscribers to be added to new segment --- sample-agent/sample-agent.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sample-agent/sample-agent.sh b/sample-agent/sample-agent.sh index 6f01a2d3b..4c2afa9d2 100755 --- a/sample-agent/sample-agent.sh +++ b/sample-agent/sample-agent.sh @@ -261,10 +261,11 @@ awk -F, '!/^subscriberId/{print $1'} $(importedCsvFilename "$EXPORT_ID" "$TARGET RESULTSEG_PSEUDO_BASENAME="resultsegment-pseudoanonymized" RESULTSEG_CLEARTEXT_BASENAME="resultsegment-cleartext" +RESULTSEG_CLEARTEXT_SUBSCRIBERS="resultsegment-cleartext-subscribers" RESULT_SEGMENT_PSEUDO_GS="$(gsExportCsvFilename "$EXPORT_ID" "$RESULTSEG_PSEUDO_BASENAME")" RESULT_SEGMENT_CLEAR_GS="$(gsExportCsvFilename "$EXPORT_ID" "$RESULTSEG_CLEARTEXT_BASENAME")" RESULT_SEGMENT_CLEAR="$(importedCsvFilename "$EXPORT_ID" "$TARGET_DIR" "$RESULTSEG_CLEARTEXT_BASENAME")" -RESULT_SEGMENT_SINGLE_COLUMN="$(importedCsvFilename "$EXPORT_ID" "$TARGET_DIR" "$RESULTSEG_CLEARTEXT_BASENAME")" +RESULT_SEGMENT_SINGLE_COLUMN="$(importedCsvFilename "$EXPORT_ID" "$TARGET_DIR" "$RESULTSEG_CLEARTEXT_SUBSCRIBERS")" # Copy the segment pseudo file to gs From 02d61d9ee22ebfc133a53fe590ec23d33652864d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Thu, 4 Oct 2018 15:29:07 +0200 Subject: [PATCH 47/93] More notes --- sample-agent/notes.txt | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/sample-agent/notes.txt b/sample-agent/notes.txt index 7fde16d49..797cf902c 100644 --- a/sample-agent/notes.txt +++ b/sample-agent/notes.txt @@ -1,3 +1,24 @@ +INTRODUCTION +==== + + +This document contains two attempts at generating yaml representations +that would be useful to guide us through the tech due-dil demo. + +The steps ahead would be: + + 1. Determine which of these paths to take. + 2. 
Separate them into actual yaml files that dan be executed + using the apply-yaml.sh script + 3. Implement changes in prime so that the changes are actually done. + +The one thing missing in the descriptions below is how to reset the +simulation to its intial state. We'll fix that in a day or two +tand then we're basically done. + + + + TAKE ONE ******* From 8198d0cc5cc4af3beea6982b01f7e4806d438dc4 Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Sun, 7 Oct 2018 02:11:38 +0200 Subject: [PATCH 48/93] Notifications to Ops Slack channel --- .../ostelco/prime/storage/graph/Neo4jStore.kt | 14 ++- .../prime/notifications/OpsNotifications.kt | 6 ++ prime/build.gradle | 1 + prime/config/config.yaml | 93 ++++++++++--------- prime/config/test.yaml | 70 +++++++------- prime/infra/README.md | 6 ++ prime/infra/dev/prime.yaml | 7 ++ prime/infra/dev/slack-secrets.yaml | 7 ++ prime/infra/prod/prime.yaml | 7 ++ prime/infra/prod/slack-secrets.yaml | 7 ++ .../integration-tests/resources/config.yaml | 64 ++++++------- settings.gradle | 2 + slack/build.gradle | 39 ++++++++ .../kotlin/org/ostelco/prime/slack/Model.kt | 37 ++++++++ .../prime/slack/SlackAppenderFactory.kt | 47 ++++++++++ .../prime/slack/SlackIntegrationModule.kt | 60 ++++++++++++ .../prime/slack/SlackNotificationReporter.kt | 64 +++++++++++++ .../ostelco/prime/slack/SlackWebHookClient.kt | 27 ++++++ .../io.dropwizard.jackson.Discoverable | 1 + .../io.dropwizard.logging.AppenderFactory | 1 + .../org.ostelco.prime.module.PrimeModule | 1 + .../ostelco/prime/slack/SlackAppenderTest.kt | 55 +++++++++++ slack/src/test/resources/config.yaml | 15 +++ 23 files changed, 517 insertions(+), 114 deletions(-) create mode 100644 prime-modules/src/main/kotlin/org/ostelco/prime/notifications/OpsNotifications.kt create mode 100644 prime/infra/dev/slack-secrets.yaml create mode 100644 prime/infra/prod/slack-secrets.yaml create mode 100644 slack/build.gradle create mode 100644 slack/src/main/kotlin/org/ostelco/prime/slack/Model.kt create 
mode 100644 slack/src/main/kotlin/org/ostelco/prime/slack/SlackAppenderFactory.kt create mode 100644 slack/src/main/kotlin/org/ostelco/prime/slack/SlackIntegrationModule.kt create mode 100644 slack/src/main/kotlin/org/ostelco/prime/slack/SlackNotificationReporter.kt create mode 100644 slack/src/main/kotlin/org/ostelco/prime/slack/SlackWebHookClient.kt create mode 100644 slack/src/main/resources/META-INF/services/io.dropwizard.jackson.Discoverable create mode 100644 slack/src/main/resources/META-INF/services/io.dropwizard.logging.AppenderFactory create mode 100644 slack/src/main/resources/META-INF/services/org.ostelco.prime.module.PrimeModule create mode 100644 slack/src/test/kotlin/org/ostelco/prime/slack/SlackAppenderTest.kt create mode 100644 slack/src/test/resources/config.yaml diff --git a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt index aef78f06d..3b2b1101f 100644 --- a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt +++ b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt @@ -15,6 +15,7 @@ import org.ostelco.prime.model.Segment import org.ostelco.prime.model.Subscriber import org.ostelco.prime.model.Subscription import org.ostelco.prime.module.getResource +import org.ostelco.prime.notifications.NOTIFY_OPS_MARKER import org.ostelco.prime.ocs.OcsAdminService import org.ostelco.prime.ocs.OcsSubscriberService import org.ostelco.prime.paymentprocessor.PaymentProcessor @@ -205,8 +206,12 @@ object Neo4jStoreSingleton : GraphStore { ocsAdminService.addBundle(Bundle(bundleId, 100_000_000)) Either.right(Unit) } - }.flatMap { subscriberToBundleStore.create(subscriber.id, bundleId, transaction) } - .ifFailedThenRollback(transaction) + }.flatMap { subscriberToBundleStore.create(subscriber.id, bundleId, transaction) + }.map { + if(subscriber.country.equals("sg", ignoreCase = true)) { + logger.info(NOTIFY_OPS_MARKER, 
"Created a new user with email: ${subscriber.email} for Singapore.\nProvision a SIM card for this user.") + } + }.ifFailedThenRollback(transaction) } // << END @@ -382,7 +387,6 @@ object Neo4jStoreSingleton : GraphStore { // then save the source if (!it.any { sourceDetailsInfo -> sourceDetailsInfo.id == sourceId }) { paymentProcessor.addSource(paymentCustomerId, sourceId) - // TODO payment: Should we remove the sourceId for saveCard == false even when captureCharge has failed? // For success case, saved source is removed after "capture charge" is saveCard == false. // Making sure same happens even for failure case by linking reversal action to transaction .finallyDo(transaction) { _ -> removePaymentSource(saveCard, paymentCustomerId, sourceId) } @@ -407,7 +411,7 @@ object Neo4jStoreSingleton : GraphStore { } .linkReversalActionToTransaction(transaction) { chargeId -> paymentProcessor.refundCharge(chargeId) - logger.error("failed to refund charge for paymentCustomerId $paymentCustomerId, chargeId $chargeId. 
Fix this in Stripe dashboard") + logger.error(NOTIFY_OPS_MARKER, "Failed to refund charge for paymentCustomerId $paymentCustomerId, chargeId $chargeId.\nFix this in Stripe dashboard.") } .map { chargeId -> Tuple3(product, paymentCustomerId, chargeId) } } @@ -447,7 +451,7 @@ object Neo4jStoreSingleton : GraphStore { paymentProcessor.captureCharge(chargeId, paymentCustomerId) .mapLeft { // TODO payment: retry capture charge - logger.error("Capture failed for paymentCustomerId $paymentCustomerId, chargeId $chargeId, Fix this in Stripe Dashboard") + logger.error(NOTIFY_OPS_MARKER, "Capture failed for paymentCustomerId $paymentCustomerId, chargeId $chargeId.\nFix this in Stripe Dashboard") } // Ignore failure to capture charge and always send Either.right() diff --git a/prime-modules/src/main/kotlin/org/ostelco/prime/notifications/OpsNotifications.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/notifications/OpsNotifications.kt new file mode 100644 index 000000000..df020b539 --- /dev/null +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/notifications/OpsNotifications.kt @@ -0,0 +1,6 @@ +package org.ostelco.prime.notifications + +import org.slf4j.Marker +import org.slf4j.MarkerFactory + +val NOTIFY_OPS_MARKER: Marker = MarkerFactory.getMarker("NOTIFY_OPERATIONS") \ No newline at end of file diff --git a/prime/build.gradle b/prime/build.gradle index 8226c1456..f51356c25 100644 --- a/prime/build.gradle +++ b/prime/build.gradle @@ -40,6 +40,7 @@ dependencies { runtimeOnly project(':app-notifier') runtimeOnly project(':payment-processor') runtimeOnly project(':analytics-module') + runtimeOnly project(':slack') implementation "io.dropwizard:dropwizard-http2:$dropwizardVersion" runtimeOnly "io.dropwizard:dropwizard-json-logging:$dropwizardVersion" diff --git a/prime/config/config.yaml b/prime/config/config.yaml index 7d24daf17..905e745f3 100644 --- a/prime/config/config.yaml +++ b/prime/config/config.yaml @@ -1,49 +1,57 @@ modules: - - type: firebase +- type: slack 
config: - configFile: /secret/pantel-prod.json - rootPath: ${FIREBASE_ROOT_PATH} - - type: neo4j - config: - host: ${NEO4J_HOST} - protocol: bolt+routing - - type: analytics - config: - projectId: pantel-2decb - dataTrafficTopicId: ${DATA_TRAFFIC_TOPIC} - purchaseInfoTopicId: ${PURCHASE_INFO_TOPIC} - activeUsersTopicId: ${ACTIVE_USERS_TOPIC} - - type: ocs - config: - lowBalanceThreshold: 100000000 - - type: pseudonymizer - config: - namespace: ${DATASTORE_NAMESPACE:-""} - - type: api - config: - authenticationCachePolicy: maximumSize=10000, expireAfterAccess=10m - jerseyClient: - timeout: 2s - - type: stripe-payment-processor - - type: firebase-app-notifier + notifications: + channel: ${SLACK_CHANNEL} + webHookUri: ${SLACK_WEBHOOK_URI} + httpClient: + timeout: 3s + connectionRequestTimeout: 1s +- type: firebase config: configFile: /secret/pantel-prod.json - - type: admin + rootPath: ${FIREBASE_ROOT_PATH} +- type: neo4j + config: + host: ${NEO4J_HOST} + protocol: bolt+routing +- type: analytics + config: + projectId: pantel-2decb + dataTrafficTopicId: ${DATA_TRAFFIC_TOPIC} + purchaseInfoTopicId: ${PURCHASE_INFO_TOPIC} + activeUsersTopicId: ${ACTIVE_USERS_TOPIC} +- type: ocs + config: + lowBalanceThreshold: 100000000 +- type: pseudonymizer + config: + namespace: ${DATASTORE_NAMESPACE:-""} +- type: api + config: + authenticationCachePolicy: maximumSize=10000, expireAfterAccess=10m + jerseyClient: + timeout: 2s +- type: stripe-payment-processor +- type: firebase-app-notifier + config: + configFile: /secret/pantel-prod.json +- type: admin server: applicationConnectors: - - type: h2c - port: 8080 - maxConcurrentStreams: 1024 - initialStreamRecvWindow: 65535 + - type: h2c + port: 8080 + maxConcurrentStreams: 1024 + initialStreamRecvWindow: 65535 requestLog: appenders: - - type: console - layout: - type: access-json - filterFactories: - - type: URI - uri: prometheus-metrics + - type: console + layout: + type: access-json + filterFactories: + - type: URI + uri: 
prometheus-metrics logging: level: INFO @@ -51,8 +59,9 @@ logging: org.ostelco: DEBUG org.dhatim.dropwizard.prometheus.DropwizardMetricsExporter: ERROR appenders: - - type: console - layout: - type: json - customFieldNames: - level: severity \ No newline at end of file + - type: slack + - type: console + layout: + type: json + customFieldNames: + level: severity \ No newline at end of file diff --git a/prime/config/test.yaml b/prime/config/test.yaml index 57ffe6db3..31e7c406d 100644 --- a/prime/config/test.yaml +++ b/prime/config/test.yaml @@ -1,44 +1,44 @@ # This config is used as config.yaml when prime is running using docker-compose for Acceptance Testing modules: - - type: firebase - config: - configFile: /secret/pantel-prod.json - rootPath: test - - type: neo4j - config: - host: neo4j - protocol: bolt - - type: analytics - config: - projectId: pantel-2decb - dataTrafficTopicId: data-traffic - purchaseInfoTopicId: purchase-info - activeUsersTopicId: active-users - - type: ocs - config: - lowBalanceThreshold: 0 - - type: pseudonymizer - config: - datastoreType: emulator - - type: api - config: - authenticationCachePolicy: maximumSize=10000, expireAfterAccess=10m - jerseyClient: - timeout: 3s - connectionRequestTimeout: 1s - - type: stripe-payment-processor - - type: firebase-app-notifier - config: - configFile: /secret/pantel-prod.json - - type: admin +- type: firebase + config: + configFile: /secret/pantel-prod.json + rootPath: test +- type: neo4j + config: + host: neo4j + protocol: bolt +- type: analytics + config: + projectId: pantel-2decb + dataTrafficTopicId: data-traffic + purchaseInfoTopicId: purchase-info + activeUsersTopicId: active-users +- type: ocs + config: + lowBalanceThreshold: 0 +- type: pseudonymizer + config: + datastoreType: emulator +- type: api + config: + authenticationCachePolicy: maximumSize=10000, expireAfterAccess=10m + jerseyClient: + timeout: 3s + connectionRequestTimeout: 1s +- type: stripe-payment-processor +- type: 
firebase-app-notifier + config: + configFile: /secret/pantel-prod.json +- type: admin server: applicationConnectors: - - type: h2c - port: 8080 - maxConcurrentStreams: 1024 - initialStreamRecvWindow: 65535 + - type: h2c + port: 8080 + maxConcurrentStreams: 1024 + initialStreamRecvWindow: 65535 logging: level: INFO diff --git a/prime/infra/README.md b/prime/infra/README.md index e1d3d696d..b5404ea10 100644 --- a/prime/infra/README.md +++ b/prime/infra/README.md @@ -204,10 +204,16 @@ gcloud container node-pools delete default-pool \ kubectl create secret generic pantel-prod.json --from-file prime/config/pantel-prod.json ``` +Note: To update the secrets defined using yaml, delete and created them again. They are not updated. + ```bash sed -e s/STRIPE_API_KEY/$(echo -n 'keep-stripe-api-key-here' | base64)/g prime/infra/dev/stripe-secrets.yaml | kubectl apply -f - ``` +```bash +sed -e s/SLACK_WEBHOOK_URI/$(echo -n 'https://hooks.slack.com/services/.../.../...' | base64)/g prime/infra/dev/slack-secrets.yaml | kubectl apply -f - +``` + ```bash kubectl create secret generic ocs-ostelco-ssl \ --from-file=certs/dev.ostelco.org/nginx.crt \ diff --git a/prime/infra/dev/prime.yaml b/prime/infra/dev/prime.yaml index 05a80140b..3d07dfae9 100644 --- a/prime/infra/dev/prime.yaml +++ b/prime/infra/dev/prime.yaml @@ -145,6 +145,13 @@ spec: image: eu.gcr.io/pantel-2decb/prime:PRIME_VERSION imagePullPolicy: Always env: + - name: SLACK_CHANNEL + value: prime-alerts + - name: SLACK_WEBHOOK_URI + valueFrom: + secretKeyRef: + name: slack-secrets + key: slackWebHookUri - name: NEO4J_HOST value: neo4j - name: DATASTORE_NAMESPACE diff --git a/prime/infra/dev/slack-secrets.yaml b/prime/infra/dev/slack-secrets.yaml new file mode 100644 index 000000000..025642bd1 --- /dev/null +++ b/prime/infra/dev/slack-secrets.yaml @@ -0,0 +1,7 @@ +apiVersion: v1 +kind: Secret +metadata: + name: slack-secrets +type: Opaque +data: + slackWebHookUri: SLACK_WEBHOOK_URI \ No newline at end of file diff --git 
a/prime/infra/prod/prime.yaml b/prime/infra/prod/prime.yaml index 54691ae8a..736054a2e 100644 --- a/prime/infra/prod/prime.yaml +++ b/prime/infra/prod/prime.yaml @@ -145,6 +145,13 @@ spec: image: eu.gcr.io/pantel-2decb/prime:PRIME_VERSION imagePullPolicy: Always env: + - name: SLACK_CHANNEL + value: prime-alerts + - name: SLACK_WEBHOOK_URI + valueFrom: + secretKeyRef: + name: slack-secrets + key: slackWebHookUri - name: NEO4J_HOST value: neo4j - name: FIREBASE_ROOT_PATH diff --git a/prime/infra/prod/slack-secrets.yaml b/prime/infra/prod/slack-secrets.yaml new file mode 100644 index 000000000..025642bd1 --- /dev/null +++ b/prime/infra/prod/slack-secrets.yaml @@ -0,0 +1,7 @@ +apiVersion: v1 +kind: Secret +metadata: + name: slack-secrets +type: Opaque +data: + slackWebHookUri: SLACK_WEBHOOK_URI \ No newline at end of file diff --git a/prime/src/integration-tests/resources/config.yaml b/prime/src/integration-tests/resources/config.yaml index dce9318e8..cd659ed6d 100644 --- a/prime/src/integration-tests/resources/config.yaml +++ b/prime/src/integration-tests/resources/config.yaml @@ -1,39 +1,39 @@ modules: - - type: firebase - config: - configFile: config/pantel-prod.json - rootPath: test - - type: neo4j - config: - host: 0.0.0.0 - protocol: bolt - - type: analytics - config: - projectId: pantel-2decb - dataTrafficTopicId: data-traffic - purchaseInfoTopicId: purchase-info - activeUsersTopicId: active-users - - type: ocs - config: - lowBalanceThreshold: 0 - - type: pseudonymizer - - type: api - config: - authenticationCachePolicy: maximumSize=10000, expireAfterAccess=10m - jerseyClient: - timeout: 3s - - type: stripe-payment-processor - - type: firebase-app-notifier - config: - configFile: config/pantel-prod.json - - type: admin +- type: firebase + config: + configFile: config/pantel-prod.json + rootPath: test +- type: neo4j + config: + host: 0.0.0.0 + protocol: bolt +- type: analytics + config: + projectId: pantel-2decb + dataTrafficTopicId: data-traffic + 
purchaseInfoTopicId: purchase-info + activeUsersTopicId: active-users +- type: ocs + config: + lowBalanceThreshold: 0 +- type: pseudonymizer +- type: api + config: + authenticationCachePolicy: maximumSize=10000, expireAfterAccess=10m + jerseyClient: + timeout: 3s +- type: stripe-payment-processor +- type: firebase-app-notifier + config: + configFile: config/pantel-prod.json +- type: admin server: applicationConnectors: - - type: h2c - port: 8080 - maxConcurrentStreams: 1024 - initialStreamRecvWindow: 65535 + - type: h2c + port: 8080 + maxConcurrentStreams: 1024 + initialStreamRecvWindow: 65535 logging: level: INFO diff --git a/settings.gradle b/settings.gradle index 50b942e94..fbd8d4601 100644 --- a/settings.gradle +++ b/settings.gradle @@ -26,6 +26,7 @@ include ':prime' include ':prime-modules' include ':prime-client-api' include ':pseudonym-server' +include ':slack' project(':acceptance-tests').projectDir = "$rootDir/acceptance-tests" as File @@ -54,3 +55,4 @@ project(':prime').projectDir = "$rootDir/prime" as File project(':prime-modules').projectDir = "$rootDir/prime-modules" as File project(':prime-client-api').projectDir = "$rootDir/prime-client-api" as File project(':pseudonym-server').projectDir = "$rootDir/pseudonym-server" as File +project(':slack').projectDir = "$rootDir/slack" as File diff --git a/slack/build.gradle b/slack/build.gradle new file mode 100644 index 000000000..acb215be8 --- /dev/null +++ b/slack/build.gradle @@ -0,0 +1,39 @@ +plugins { + id "org.jetbrains.kotlin.jvm" version "1.2.71" + id "java-library" +} + +dependencies { + implementation project(":prime-modules") + + implementation "io.dropwizard:dropwizard-client:$dropwizardVersion" + + testImplementation 'javax.xml.bind:jaxb-api:2.3.0' + testImplementation 'javax.activation:activation:1.1.1' + + testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" + + testImplementation "org.junit.jupiter:junit-jupiter-api:$junit5Version" + testRuntimeOnly 
"org.junit.jupiter:junit-jupiter-engine:$junit5Version" +} + +test { + + if (project.hasProperty("slackWebHookUri")) { + environment("SLACK_WEBHOOK_URI", slackWebHookUri) + } + + if (project.hasProperty("slackChannel")) { + environment("SLACK_CHANNEL", slackChannel) + } + + + // native support to Junit5 in Gradle 4.6+ + useJUnitPlatform { + includeEngines 'junit-jupiter' + } + testLogging { + exceptionFormat = 'full' + events "PASSED", "FAILED", "SKIPPED" + } +} \ No newline at end of file diff --git a/slack/src/main/kotlin/org/ostelco/prime/slack/Model.kt b/slack/src/main/kotlin/org/ostelco/prime/slack/Model.kt new file mode 100644 index 000000000..1a25edc5e --- /dev/null +++ b/slack/src/main/kotlin/org/ostelco/prime/slack/Model.kt @@ -0,0 +1,37 @@ +package org.ostelco.prime.slack + +import com.fasterxml.jackson.annotation.JsonProperty + +data class Message( + val channel: String, + @JsonProperty("username") val userName: String? = null, + val text: String ?= null, + @JsonProperty("icon_emoji") val iconEmoji: String ?= null, + val attachments: List = emptyList()) { + + fun format(): Message = this.copy( + channel = "#$channel", + iconEmoji = iconEmoji?.let { ":$it:" }) +} + +data class Attachment( + val fallback: String, + val color: String? = null, + val pretext: String? = null, + @JsonProperty("author_name") val authorName: String, + @JsonProperty("author_link") val authorLink: String? = null, + @JsonProperty("author_icon") val authorIcon: String? = null, + val title: String, + @JsonProperty("title_link") val titleLink: String? = null, + val text: String, + val fields: List = emptyList(), + @JsonProperty("image_url") val imageUrl: String? = null, + @JsonProperty("thumb_url") val thumbUrl: String? = null, + val footer: String? = null, + @JsonProperty("footer_icon") val footerIcon: String? 
= null, + @JsonProperty("ts") val timestampEpochSeconds: Long) + +data class Field( + val title: String, + val value: String, + val short: Boolean) \ No newline at end of file diff --git a/slack/src/main/kotlin/org/ostelco/prime/slack/SlackAppenderFactory.kt b/slack/src/main/kotlin/org/ostelco/prime/slack/SlackAppenderFactory.kt new file mode 100644 index 000000000..cfdf40c8e --- /dev/null +++ b/slack/src/main/kotlin/org/ostelco/prime/slack/SlackAppenderFactory.kt @@ -0,0 +1,47 @@ +package org.ostelco.prime.slack + +import ch.qos.logback.classic.LoggerContext +import ch.qos.logback.classic.spi.ILoggingEvent +import ch.qos.logback.core.Appender +import ch.qos.logback.core.AppenderBase +import com.fasterxml.jackson.annotation.JsonTypeName +import io.dropwizard.logging.AbstractAppenderFactory +import io.dropwizard.logging.async.AsyncAppenderFactory +import io.dropwizard.logging.filter.LevelFilterFactory +import io.dropwizard.logging.layout.LayoutFactory +import org.ostelco.prime.notifications.NOTIFY_OPS_MARKER +import org.slf4j.event.Level + + +@JsonTypeName("slack") +class SlackAppenderFactory : AbstractAppenderFactory() { + + override fun build( + context: LoggerContext?, + applicationName: String?, + layoutFactory: LayoutFactory?, + levelFilterFactory: LevelFilterFactory?, + asyncAppenderFactory: AsyncAppenderFactory?): Appender { + + val appender = SlackAppender() + appender.name = "slack-appender" + appender.context = context + appender.addFilter(levelFilterFactory?.build(threshold)) + filterFactories.forEach { f -> appender.addFilter(f.build()) } + appender.start() + return wrapAsync(appender, asyncAppenderFactory) + } +} + +class SlackAppender : AppenderBase() { + + override fun append(eventObject: ILoggingEvent?) 
{ + if (eventObject != null) { + if (eventObject.marker == NOTIFY_OPS_MARKER) { + SlackNotificationReporter.notifyEvent( + level = Level.valueOf(eventObject.level.levelStr), + message = eventObject.message) + } + } + } +} \ No newline at end of file diff --git a/slack/src/main/kotlin/org/ostelco/prime/slack/SlackIntegrationModule.kt b/slack/src/main/kotlin/org/ostelco/prime/slack/SlackIntegrationModule.kt new file mode 100644 index 000000000..368016880 --- /dev/null +++ b/slack/src/main/kotlin/org/ostelco/prime/slack/SlackIntegrationModule.kt @@ -0,0 +1,60 @@ +package org.ostelco.prime.slack + +import com.fasterxml.jackson.annotation.JsonProperty +import com.fasterxml.jackson.annotation.JsonTypeName +import io.dropwizard.client.HttpClientBuilder +import io.dropwizard.client.HttpClientConfiguration +import io.dropwizard.setup.Environment +import org.ostelco.prime.module.PrimeModule + +@JsonTypeName("slack") +class SlackIntegrationModule : PrimeModule { + + @JsonProperty + var config: Config? 
= null + + override fun init(env: Environment) { + + config?.notificationsConfig?.apply { + + val httpClient = HttpClientBuilder(env) + .using(this.httpClientConfiguration) + .build("slack"); + + Registry.slackWebHookClient = SlackWebHookClient( + webHookUri = this.webHookUri, + httpClient = httpClient) + + Registry.channel = this.channel + Registry.userName = this.userName + Registry.isInitialized = true + } + } +} + +object Registry { + var isInitialized = false + lateinit var slackWebHookClient: SlackWebHookClient + lateinit var channel: String + lateinit var userName: String +} + +class Config { + @JsonProperty("notifications") + lateinit var notificationsConfig: NotificationsConfig +} + +class NotificationsConfig { + + @JsonProperty + lateinit var webHookUri: String + + @JsonProperty("httpClient") + var httpClientConfiguration = HttpClientConfiguration() + + @JsonProperty + var channel: String = "general" + + @JsonProperty + var userName: String = "prime" +} \ No newline at end of file diff --git a/slack/src/main/kotlin/org/ostelco/prime/slack/SlackNotificationReporter.kt b/slack/src/main/kotlin/org/ostelco/prime/slack/SlackNotificationReporter.kt new file mode 100644 index 000000000..893d9ad8f --- /dev/null +++ b/slack/src/main/kotlin/org/ostelco/prime/slack/SlackNotificationReporter.kt @@ -0,0 +1,64 @@ +package org.ostelco.prime.slack + +import org.ostelco.prime.getLogger +import org.ostelco.prime.jsonmapper.asJson +import org.slf4j.event.Level +import org.slf4j.event.Level.DEBUG +import org.slf4j.event.Level.ERROR +import org.slf4j.event.Level.INFO +import org.slf4j.event.Level.TRACE +import org.slf4j.event.Level.WARN +import java.time.Instant + +object SlackNotificationReporter { + + private val logger by getLogger() + + fun notifyEvent(level: Level, message: String) { + if (Registry.isInitialized) { + val body = asJson( + Message( + channel = Registry.channel, + // text = message, + // userName = Registry.userName, + iconEmoji = levelToEmoji(level), + 
attachments = listOf( + Attachment( + fallback = message, + color = levelToColor(level), + authorName = Registry.userName, + title = levelToTitle(level), + text = message, + timestampEpochSeconds = Instant.now().epochSecond)) + ).format() + ) + // logger.info(body) // for debugging only + Registry.slackWebHookClient.post(body) + } + } + + private fun levelToEmoji(level: Level): String = when (level) { + ERROR -> "fire" + WARN -> "warning" + INFO -> "information_source" + DEBUG -> "robot_face" + TRACE -> "mag" + } + + private fun levelToColor(level: Level): String = when (level) { + ERROR -> "danger" + WARN -> "warning" + INFO -> "good" + DEBUG -> "#0080FF" + TRACE -> "#C0C0C0" + } + + private fun levelToTitle(level: Level): String = when (level) { + ERROR -> "Error" + WARN -> "Warning" + INFO -> "Info" + DEBUG -> "Debug" + TRACE -> "Trace" + } +} + diff --git a/slack/src/main/kotlin/org/ostelco/prime/slack/SlackWebHookClient.kt b/slack/src/main/kotlin/org/ostelco/prime/slack/SlackWebHookClient.kt new file mode 100644 index 000000000..879538bbb --- /dev/null +++ b/slack/src/main/kotlin/org/ostelco/prime/slack/SlackWebHookClient.kt @@ -0,0 +1,27 @@ +package org.ostelco.prime.slack + +import org.apache.http.client.entity.EntityBuilder +import org.apache.http.client.methods.HttpPost +import org.apache.http.impl.client.CloseableHttpClient +import org.apache.http.util.EntityUtils +import org.ostelco.prime.getLogger + +/** + * Simple HttpClient for Slack + */ +class SlackWebHookClient( + private val webHookUri: String, + private val httpClient: CloseableHttpClient) { + + private val logger by getLogger() + + fun post(body: String) { + val entity = EntityBuilder.create().apply { this.text = body }.build() + val request = HttpPost(webHookUri).apply { this.entity = entity } + val response = httpClient.execute(request) + val responseText = EntityUtils.toString(response.entity) + if (responseText != "ok") { + logger.error("Failed to send messages to slack. 
Reason: {}", responseText) + } + } +} \ No newline at end of file diff --git a/slack/src/main/resources/META-INF/services/io.dropwizard.jackson.Discoverable b/slack/src/main/resources/META-INF/services/io.dropwizard.jackson.Discoverable new file mode 100644 index 000000000..8056fe23b --- /dev/null +++ b/slack/src/main/resources/META-INF/services/io.dropwizard.jackson.Discoverable @@ -0,0 +1 @@ +org.ostelco.prime.module.PrimeModule \ No newline at end of file diff --git a/slack/src/main/resources/META-INF/services/io.dropwizard.logging.AppenderFactory b/slack/src/main/resources/META-INF/services/io.dropwizard.logging.AppenderFactory new file mode 100644 index 000000000..6d45f7550 --- /dev/null +++ b/slack/src/main/resources/META-INF/services/io.dropwizard.logging.AppenderFactory @@ -0,0 +1 @@ +org.ostelco.prime.slack.SlackAppenderFactory \ No newline at end of file diff --git a/slack/src/main/resources/META-INF/services/org.ostelco.prime.module.PrimeModule b/slack/src/main/resources/META-INF/services/org.ostelco.prime.module.PrimeModule new file mode 100644 index 000000000..dae5a2822 --- /dev/null +++ b/slack/src/main/resources/META-INF/services/org.ostelco.prime.module.PrimeModule @@ -0,0 +1 @@ +org.ostelco.prime.slack.SlackIntegrationModule \ No newline at end of file diff --git a/slack/src/test/kotlin/org/ostelco/prime/slack/SlackAppenderTest.kt b/slack/src/test/kotlin/org/ostelco/prime/slack/SlackAppenderTest.kt new file mode 100644 index 000000000..95352d86e --- /dev/null +++ b/slack/src/test/kotlin/org/ostelco/prime/slack/SlackAppenderTest.kt @@ -0,0 +1,55 @@ +package org.ostelco.prime.slack + +import com.fasterxml.jackson.annotation.JsonProperty +import io.dropwizard.Application +import io.dropwizard.Configuration +import io.dropwizard.configuration.EnvironmentVariableSubstitutor +import io.dropwizard.configuration.SubstitutingSourceProvider +import io.dropwizard.setup.Bootstrap +import io.dropwizard.setup.Environment +import org.junit.jupiter.api.Test 
+import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable +import org.ostelco.prime.getLogger +import org.ostelco.prime.module.PrimeModule +import org.ostelco.prime.notifications.NOTIFY_OPS_MARKER + + +class TestApp : Application() { + + override fun initialize(bootstrap: Bootstrap) { + bootstrap.configurationSourceProvider = SubstitutingSourceProvider( + bootstrap.configurationSourceProvider, + EnvironmentVariableSubstitutor(false)) + } + + override fun run(configuration: TestConfig, environment: Environment) { + configuration.modules.forEach { it.init(environment) } + } +} + +class TestConfig: Configuration() { + + @JsonProperty + lateinit var modules: List +} + +class SlackAppenderTest { + + private val logger by getLogger() + + @EnabledIfEnvironmentVariable(named = "SLACK_WEBHOOK_URI", matches = "https://hooks.slack.com/services/.*") + @Test + fun testSlackLogging() { + + TestApp().run("server", "src/test/resources/config.yaml") + + Thread.sleep(3000) + + logger.debug(NOTIFY_OPS_MARKER, "Some debug message text") + logger.info(NOTIFY_OPS_MARKER, "Some Info message text") + logger.warn(NOTIFY_OPS_MARKER, "Some Warning message text") + logger.error(NOTIFY_OPS_MARKER, "Some Error message text") + + Thread.sleep(7000) + } +} \ No newline at end of file diff --git a/slack/src/test/resources/config.yaml b/slack/src/test/resources/config.yaml new file mode 100644 index 000000000..eb8b0f61a --- /dev/null +++ b/slack/src/test/resources/config.yaml @@ -0,0 +1,15 @@ +modules: +- type: slack + config: + notifications: + channel: ${SLACK_CHANNEL} + webHookUri: ${SLACK_WEBHOOK_URI} + httpClient: + timeout: 3s + connectionRequestTimeout: 1s + +logging: + level: INFO + appenders: + - type: console + - type: slack \ No newline at end of file From 3e07adc8ac1d0853906e51ae7c0f9bbefa372692 Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Sun, 7 Oct 2018 16:42:42 +0200 Subject: [PATCH 49/93] Fix indentation in prime config --- prime/config/config.yaml | 20 
++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/prime/config/config.yaml b/prime/config/config.yaml index 905e745f3..d26cd9028 100644 --- a/prime/config/config.yaml +++ b/prime/config/config.yaml @@ -1,16 +1,16 @@ modules: - type: slack - config: - notifications: - channel: ${SLACK_CHANNEL} - webHookUri: ${SLACK_WEBHOOK_URI} - httpClient: - timeout: 3s - connectionRequestTimeout: 1s + config: + notifications: + channel: ${SLACK_CHANNEL} + webHookUri: ${SLACK_WEBHOOK_URI} + httpClient: + timeout: 3s + connectionRequestTimeout: 1s - type: firebase - config: - configFile: /secret/pantel-prod.json - rootPath: ${FIREBASE_ROOT_PATH} + config: + configFile: /secret/pantel-prod.json + rootPath: ${FIREBASE_ROOT_PATH} - type: neo4j config: host: ${NEO4J_HOST} From 033144382bb190d02af0530d76fe0837e5933007 Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Sun, 7 Oct 2018 21:09:12 +0200 Subject: [PATCH 50/93] Corrected documentation to match new module names --- README.md | 10 +++++++-- docs/MODULES.md | 28 ++++++++++++++----------- docs/prime-dependencies/dependency.puml | 8 +++---- docs/prime-dependencies/modules.puml | 12 +++++------ docs/prime-dependencies/ocs.puml | 12 +++++------ firebase-extensions/build.gradle | 2 +- ocs-grpc-api/README.md | 28 +++++++++++++++---------- ostelco-lib/build.gradle | 2 +- prime/infra/README.md | 2 +- 9 files changed, 60 insertions(+), 44 deletions(-) diff --git a/README.md b/README.md index 5ba31bb3c..a069841c2 100644 --- a/README.md +++ b/README.md @@ -16,16 +16,22 @@ Mono Repository for core protocols and services around a OCS/BSS for packet data * [The big picture (diagram) of current work-flow](https://github.com/ostelco/ostelco-docs/blob/master/the-current-work-flow.md) * [General Documentation](./docs/README.md) - * [analytics](./analytics/README.md) * [admin-api](./admin-api/README.md) * [auth-server](./auth-server/README.md) + * [bq-metrics-extractor](./bq-metrics-extractor/README.md) * 
[client-api](./client-api/README.md) + * [dataflow-pipelines](./dataflow-pipelines/README.md) * [diameter-stack](./diameter-stack/README.md) * [diameter-test](./diameter-test/README.md) * [exporter](./exporter/README.md) - * [ocs-api](./ocs-api/README.md) + * [neo4j-store](./neo4j-store/README.md) + * [ocs-grpc-api](./ocs-grpc-api/README.md) * [ocsgw](./ocsgw/README.md) * [ostelco-lib](./ostelco-lib/README.md) + * [payment-processor](./payment-processor/README.md) + * [prime-client-api](./prime-client-api/README.md) * [prime](./prime/README.md) + * [infra](./prime/infra/README.md) * [pseudonym-server](./pseudonym-server/README.md) * [seagull](./seagull/README.md) + * [neo4j-admin-tools](./tools/neo4j-admin-tools/README.md) diff --git a/docs/MODULES.md b/docs/MODULES.md index f2e5beb7e..feb8f265e 100644 --- a/docs/MODULES.md +++ b/docs/MODULES.md @@ -17,22 +17,26 @@ - `prime` acts has a single deployable unit. - But, `prime` has minimal boilerplate code needed for it to act as an aggregator. - All the `functions` in `prime` are moved to separate libraries. - - `prime-api` is an library which acts as a **bridge** between `prime` and all the modules. + - `prime-modules` is an library which acts as a **bridge** between `prime` and all the modules. - Modules are of different types: - - Modules which are need access to Dropwizard's environment or configuration, which is provided via `prime-api`. - - Modules which implement an interface, which is defined in `prime-api`. + - Modules which are need access to Dropwizard's environment or configuration, which is provided via `prime-modules`. + - Modules which implement an interface, which is defined in `prime-modules`. 
### Dependency -```text -[prime] --(compile-time dependency)--> [prime-api] <--(compile-time dependency)-- [Component] <--(runtime dependency) - \ ^ \ / - \__________________________(runtime dependency)________________________________/ \_________/ - +```text ++-------+ +---------------+ +-----------+ +| | | | | | +| PRIME +--(compile-time dependency)-->| prime-modules |<--(compile-time-dependency)--+ Component |<-----------+ +| | | | | | | ++---+---+ +---------------+ +-----+---+-+ | + | ^ | (runtime dependency) + | | | | + +--------------------------------(runtime dependency)-----------------------------------+ +--------------+ ``` ### Implementation - - New module library will have `prime-api` as `implementation` dependency (which is `compile` dependency in gradle). + - New module library will have `prime-modules` as `implementation` dependency (which is `compile` dependency in gradle). - Add the new module library as `runtimeOnly` dependency in `prime`. ##### Modules needing Dropwizard environment or configuration @@ -44,15 +48,15 @@ - File named `org.ostelco.prime.module.PrimeModule` which contains name of class (including package name) which implements `org.ostelco.prime.module.PrimeModule`. ##### Modules implementing an interface - - These components act as a **provider** for a **service** defined by an `interface` in `prime-api`. + - These components act as a **provider** for a **service** defined by an `interface` in `prime-modules`. - Other components **consume service provided** by these components. - - Implement the `interface` defined in `prime-api`. + - Implement the `interface` defined in `prime-modules`. - The implementing class should have a `public no-arg constructor`. - Add a file in `src/main/resources/META-INF/services`: - Name of the file should be name of interface including package name. - File should contain 1 line - name of the class (including package name) which implements the interface. 
- Care should be taken that there is only one such implementing class. - - The object of implementing class can then be injected using `getResource()` defined in `ResourceRegistry.kt` in `prime-api` as: + - The object of implementing class can then be injected using `getResource()` defined in `ResourceRegistry.kt` in `prime-modules` as: ```kotlin private val instance: InterfaceName = getResource() diff --git a/docs/prime-dependencies/dependency.puml b/docs/prime-dependencies/dependency.puml index bec770096..fc66e5b61 100644 --- a/docs/prime-dependencies/dependency.puml +++ b/docs/prime-dependencies/dependency.puml @@ -8,15 +8,15 @@ note left of prime other via disruptor api. end note -interface "prime-api" as primeApi -[prime] --> primeApi : <> +interface "prime-modules" as primeModules +[prime] --> primeModules : <> -note left of primeApi +note left of primeModules Code needed by modules for them to be aggregated into prime. end note -[module(s)] --> primeApi : <> +[module(s)] --> primeModules : <> [module(s)] ..> [module(s)] : <> [prime] ..> [module(s)] : <> diff --git a/docs/prime-dependencies/modules.puml b/docs/prime-dependencies/modules.puml index fd22ceb4d..bea7bc796 100644 --- a/docs/prime-dependencies/modules.puml +++ b/docs/prime-dependencies/modules.puml @@ -7,22 +7,22 @@ note left of prime deployable component. end note -interface "prime-api" as primeApi -[prime] --> primeApi +interface "prime-modules" as primeModules +[prime] --> primeModules -note left of primeApi +note left of primeModules Code needed by modules for themto be aggregated into prime. end note -note right of primeApi +note right of primeModules Interfaces which are implemented by the modules. 
end note -[module(s)] ..> primeApi : <> -[module(s)] ..> primeApi : <> +[module(s)] ..> primeModules : <> +[module(s)] ..> primeModules : <> @enduml \ No newline at end of file diff --git a/docs/prime-dependencies/ocs.puml b/docs/prime-dependencies/ocs.puml index b4e5775d2..d5350da35 100644 --- a/docs/prime-dependencies/ocs.puml +++ b/docs/prime-dependencies/ocs.puml @@ -7,27 +7,27 @@ note left of prime deployable component. end note -interface "prime-api" as primeApi -[prime] --> primeApi : <> +interface "prime-modules" as primeModules +[prime] --> primeModules : <> -interface "ocs-api" as ocsApi +interface "ocs-grpc-api" as ocsApi note left of ocsApi: gRPC interface for OCS [ocs] note right of ocs Online Charging System. - Implements ocs-api + Implements ocs-grpc-api for consumption. end note -primeApi ..> ocsApi : <> +primeModules ..> ocsApi : <> note left of primeApi Code needed by modules for them to be aggregated into prime. end note -[ocs] ..> primeApi : <> +[ocs] ..> primeModules : <> [ocs] ..> ocsApi : <> @enduml \ No newline at end of file diff --git a/firebase-extensions/build.gradle b/firebase-extensions/build.gradle index 8a46b088a..962e31d50 100644 --- a/firebase-extensions/build.gradle +++ b/firebase-extensions/build.gradle @@ -5,6 +5,6 @@ plugins { dependencies { implementation project(":prime-modules") - // Match netty via ocs-api + // Match netty via ocs-grpc-api api "com.google.firebase:firebase-admin:$firebaseVersion" } \ No newline at end of file diff --git a/ocs-grpc-api/README.md b/ocs-grpc-api/README.md index 0c2d6e582..be57b16b9 100644 --- a/ocs-grpc-api/README.md +++ b/ocs-grpc-api/README.md @@ -5,20 +5,26 @@ This is a translation from the DIAMETER Credit-Control-Request [RFC 4006](https://tools.ietf.org/html/rfc4006#page-9) to gRPC. Not all elements in the Credit-Control-Request is translated. Only the one we are currently using. 
* CreditControlRequest - - CreditControlRequestInfo ( CreditControlRequestType, String requestId, String msisdn, String imsi, MultipleServiceCreditControl[], ServiceInfo serviceInformation) - - => +``` +=> CreditControlRequestInfo( + CreditControlRequestType, + String requestId, + String msisdn, + String imsi, + MultipleServiceCreditControl[], + ServiceInfo serviceInformation) + +<= CreditControlAnswerInfo( + String requestId, + String msisdn, + MultipleServiceCreditControl[] mscc) +``` - CreditControlAnswerInfo ( String requestId, String msisdn, MultipleServiceCreditControl[] mscc) - - - **From OCS to P-GW** - * Activate - - activate (String msisdn) +``` +<= activate (String msisdn) +``` diff --git a/ostelco-lib/build.gradle b/ostelco-lib/build.gradle index a06c315b2..d492cff66 100644 --- a/ostelco-lib/build.gradle +++ b/ostelco-lib/build.gradle @@ -6,7 +6,7 @@ plugins { dependencies { implementation "io.dropwizard:dropwizard-core:$dropwizardVersion" implementation "io.dropwizard:dropwizard-auth:$dropwizardVersion" - // Match netty via ocs-api + // Match netty via ocs-grpc-api implementation "com.google.firebase:firebase-admin:$firebaseVersion" implementation 'com.lmax:disruptor:3.4.2' implementation "com.google.guava:guava:$guavaVersion" diff --git a/prime/infra/README.md b/prime/infra/README.md index b5404ea10..fc7496ff0 100644 --- a/prime/infra/README.md +++ b/prime/infra/README.md @@ -249,7 +249,7 @@ pip install grpcio grpcio-tools python -m grpc_tools.protoc \ --include_imports \ --include_source_info \ - --proto_path=ocs-api/src/main/proto \ + --proto_path=ocs-grpc-api/src/main/proto \ --descriptor_set_out=ocs_descriptor.pb \ ocs.proto From f88a75f7529f7e269d2047da275f0b3c2af51c60 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Mon, 8 Oct 2018 10:08:47 +0200 Subject: [PATCH 51/93] Add readme for raw_purchases pipelines --- prime/infra/README.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/prime/infra/README.md 
b/prime/infra/README.md index fc7496ff0..157132c07 100644 --- a/prime/infra/README.md +++ b/prime/infra/README.md @@ -371,4 +371,26 @@ gcloud dataflow jobs run active-users \ inputTopic=projects/pantel-2decb/topics/active-users,\ outputTableSpec=pantel-2decb:ocs_gateway.raw_activeusers +``` + +## Deploy dataflow pipeline for raw_purchases + +```bash +# For dev cluster +gcloud dataflow jobs run purchase-records-dev \ + --gcs-location gs://dataflow-templates/latest/PubSub_to_BigQuery \ + --region europe-west1 \ + --parameters \ +inputTopic=projects/pantel-2decb/topics/purchase-info-dev,\ +outputTableSpec=pantel-2decb:purchases_dev.raw_purchases + + +# For production cluster +gcloud dataflow jobs run purchase-records \ + --gcs-location gs://dataflow-templates/latest/PubSub_to_BigQuery \ + --region europe-west1 \ + --parameters \ +inputTopic=projects/pantel-2decb/topics/purchase-info,\ +outputTableSpec=pantel-2decb:purchases.raw_purchases + ``` \ No newline at end of file From 743d77d616d2008892e1d0439e7fd044c5f1c130 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Mon, 8 Oct 2018 11:02:07 +0200 Subject: [PATCH 52/93] First stab at demo scripts. 
--- sample-agent/demo-scripts/init1.yml | 21 +++++++++++++++++++++ sample-agent/demo-scripts/init2.yml | 17 +++++++++++++++++ sample-agent/demo-scripts/init3.yml | 17 +++++++++++++++++ sample-agent/demo-scripts/reset.yml | 9 +++++++++ sample-agent/demo-scripts/step1.yml | 12 ++++++++++++ sample-agent/demo-scripts/step2.yml | 11 +++++++++++ 6 files changed, 87 insertions(+) create mode 100644 sample-agent/demo-scripts/init1.yml create mode 100644 sample-agent/demo-scripts/init2.yml create mode 100644 sample-agent/demo-scripts/init3.yml create mode 100644 sample-agent/demo-scripts/reset.yml create mode 100644 sample-agent/demo-scripts/step1.yml create mode 100644 sample-agent/demo-scripts/step2.yml diff --git a/sample-agent/demo-scripts/init1.yml b/sample-agent/demo-scripts/init1.yml new file mode 100644 index 000000000..6165b5ec0 --- /dev/null +++ b/sample-agent/demo-scripts/init1.yml @@ -0,0 +1,21 @@ +createOffer: + id: demoOffer1 + createProducts: + - sku: 1GB_200NOK + price: + amount: 200 + currency: NOK + properties: + noOfBytes: 1_000_000_000 + presentation: + isDefault: true + offerLabel: Top Up + priceLabel: 200 NOK + createSegments: + - id: demoSegment1 + subscribers: + - foo@baz.com + - bar@baz.com + + + diff --git a/sample-agent/demo-scripts/init2.yml b/sample-agent/demo-scripts/init2.yml new file mode 100644 index 000000000..da27c8148 --- /dev/null +++ b/sample-agent/demo-scripts/init2.yml @@ -0,0 +1,17 @@ +createOffer: + id: demoOffer2 + createProducts: + - sku: 2GB_200NOK + price: + amount: 200 + currency: NOK + properties: + noOfBytes: 2_000_000_000 + presentation: + isDefault: true + offerLabel: Top Up + priceLabel: 200 NOK + createSegments: + - id: demoSegment2 + + diff --git a/sample-agent/demo-scripts/init3.yml b/sample-agent/demo-scripts/init3.yml new file mode 100644 index 000000000..9157fe876 --- /dev/null +++ b/sample-agent/demo-scripts/init3.yml @@ -0,0 +1,17 @@ +createOffer: + id: demoOffer3 + createProducts: + - sku: 1GB_50NOK + price: + 
amount: 50 + currency: NOK + properties: + noOfBytes: 1_000_000_000 + presentation: + isDefault: true + offerLabel: Special offer + priceLabel: 50 NOK + createSegments: + - id: demoSegment3 + subscribers: + - bar@baz.com diff --git a/sample-agent/demo-scripts/reset.yml b/sample-agent/demo-scripts/reset.yml new file mode 100644 index 000000000..99f5f1c5e --- /dev/null +++ b/sample-agent/demo-scripts/reset.yml @@ -0,0 +1,9 @@ +updateSegments: + - id: s1 + subscribers: + - u1 + - u2 + - id: s2 + subscribers: + - id: s3 + subscribers: diff --git a/sample-agent/demo-scripts/step1.yml b/sample-agent/demo-scripts/step1.yml new file mode 100644 index 000000000..2506fea00 --- /dev/null +++ b/sample-agent/demo-scripts/step1.yml @@ -0,0 +1,12 @@ +updateSegments: + - id: s1 + subscribers: + - u1 + - id: s2 + subscribers: + - u2 + - id: s3 + subscribers: + + + diff --git a/sample-agent/demo-scripts/step2.yml b/sample-agent/demo-scripts/step2.yml new file mode 100644 index 000000000..846882a1a --- /dev/null +++ b/sample-agent/demo-scripts/step2.yml @@ -0,0 +1,11 @@ +updateSegments: + - id: s1 + subscribers: + - u1 + - id: s2 + subscribers: + - u2 + - id: s3 + subscribers: + - u2 + From b817b0f96b0c4d9281f0873c6dbed1cf61bdc01c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Mon, 8 Oct 2018 11:20:17 +0200 Subject: [PATCH 53/93] Adding script to generate test scripts --- sample-agent/generate-test-scripts.sh | 118 ++++++++++++++++++++++++++ 1 file changed, 118 insertions(+) create mode 100644 sample-agent/generate-test-scripts.sh diff --git a/sample-agent/generate-test-scripts.sh b/sample-agent/generate-test-scripts.sh new file mode 100644 index 000000000..7072bcb20 --- /dev/null +++ b/sample-agent/generate-test-scripts.sh @@ -0,0 +1,118 @@ +#!/bin/bash + +if [[ $# -ne 2 ]] ; then + echo "$0 ERROR: requires exactly three parameters" + echo "$0 ERROR: $0 target-dir userid1 userid2" + exit 1 +fi + +TARGET_DIR=$1 +USER_1=$2 +USER_2=$3 + + +if [[ ! 
-d "$TARGET_DIR" ]] ; then + echo "$0 ERROR: Target directory '$TARGET_DIR' does not exist or is not a directory" + exit 1 +fi + + +cat > $TARGET_DIR/init1.yml < $TARGET_DIR/init2.yml < $TARGET_DIR/init3.yml < $TARGET_DIR/step1.yml < $TARGET_DIR/step2.yml < $TARGET_DIR/reset.yml < Date: Mon, 8 Oct 2018 11:22:23 +0200 Subject: [PATCH 54/93] Parameterizing the user ids --- sample-agent/generate-test-scripts.sh | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/sample-agent/generate-test-scripts.sh b/sample-agent/generate-test-scripts.sh index 7072bcb20..9fcba9630 100644 --- a/sample-agent/generate-test-scripts.sh +++ b/sample-agent/generate-test-scripts.sh @@ -34,8 +34,8 @@ createOffer: createSegments: - id: demoSegment1 subscribers: - - foo@baz.com - - bar@baz.com + - $USER_1 + - $USER_2 EOF @@ -74,7 +74,7 @@ createOffer: createSegments: - id: demoSegment3 subscribers: - - bar@baz.com + - $USER_2 EOF @@ -82,10 +82,10 @@ cat > $TARGET_DIR/step1.yml < $TARGET_DIR/step2.yml < $TARGET_DIR/reset.yml < Date: Mon, 8 Oct 2018 11:28:25 +0200 Subject: [PATCH 55/93] Adding comments, parameterizing the segments --- sample-agent/generate-test-scripts.sh | 43 +++++++++++++++++++-------- 1 file changed, 31 insertions(+), 12 deletions(-) diff --git a/sample-agent/generate-test-scripts.sh b/sample-agent/generate-test-scripts.sh index 9fcba9630..d925dd524 100644 --- a/sample-agent/generate-test-scripts.sh +++ b/sample-agent/generate-test-scripts.sh @@ -1,5 +1,19 @@ #!/bin/bash +## +## +## This sets up a set of demo scripts that can be used +## in conjunction with the script "apply-yaml.sh" to +## apply changes to the product/segment/offer configuration +## in Prime. It is intended as a vehicle for testing +## the interaction in basic ways, and will most likely +## be removed or replaced when we are more confident that +## the design of the import mechanism is fit for purpose. 
+## In the mean time, we'll use this mechanism as it provides +## great flexibility and transparency in to what is actually +## applied. +## + if [[ $# -ne 2 ]] ; then echo "$0 ERROR: requires exactly three parameters" echo "$0 ERROR: $0 target-dir userid1 userid2" @@ -10,6 +24,11 @@ TARGET_DIR=$1 USER_1=$2 USER_2=$3 +SEGMENT_1="demoSegment1" +SEGMENT_2="demoSegment2" +SEGMENT_3="demoSegment3" + + if [[ ! -d "$TARGET_DIR" ]] ; then echo "$0 ERROR: Target directory '$TARGET_DIR' does not exist or is not a directory" @@ -32,7 +51,7 @@ createOffer: offerLabel: Top Up priceLabel: 200 NOK createSegments: - - id: demoSegment1 + - id: $SEGMENT_1 subscribers: - $USER_1 - $USER_2 @@ -54,7 +73,7 @@ createOffer: offerLabel: Top Up priceLabel: 200 NOK createSegments: - - id: demoSegment2 + - id: $SEGMENT_2 EOF cat > $TARGET_DIR/init3.yml < $TARGET_DIR/step1.yml < $TARGET_DIR/step2.yml < $TARGET_DIR/reset.yml < Date: Mon, 8 Oct 2018 11:28:47 +0200 Subject: [PATCH 56/93] CHmod to make test generation script executable --- sample-agent/generate-test-scripts.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 sample-agent/generate-test-scripts.sh diff --git a/sample-agent/generate-test-scripts.sh b/sample-agent/generate-test-scripts.sh old mode 100644 new mode 100755 From fc3edd14157e76fdd4ac7bea7234adba8d91439b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Mon, 8 Oct 2018 11:32:01 +0200 Subject: [PATCH 57/93] Three shall be the number of thy counting --- sample-agent/generate-test-scripts.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sample-agent/generate-test-scripts.sh b/sample-agent/generate-test-scripts.sh index d925dd524..aa4076747 100755 --- a/sample-agent/generate-test-scripts.sh +++ b/sample-agent/generate-test-scripts.sh @@ -14,7 +14,7 @@ ## applied. 
##
 
-if [[ $# -ne 2 ]] ; then
+if [[ $# -ne 3 ]] ; then
     echo "$0 ERROR: requires exactly three parameters"
     echo "$0 ERROR: $0 target-dir userid1 userid2"
     exit 1

From c3b13446a2f84fa0537fc8ab195745e2979593f3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?=
Date: Mon, 8 Oct 2018 11:41:17 +0200
Subject: [PATCH 58/93] Add some tips for the user so that it will be very
 simple to use the system.

---
 sample-agent/generate-test-scripts.sh | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/sample-agent/generate-test-scripts.sh b/sample-agent/generate-test-scripts.sh
index aa4076747..055cea2d5 100755
--- a/sample-agent/generate-test-scripts.sh
+++ b/sample-agent/generate-test-scripts.sh
@@ -135,3 +135,19 @@ updateSegments:
     - id: $SEGMENT_3
       subscribers:
 EOF
+
+echo "$0: INFO Successfully created demo scripts in directory $TARGET_DIR"
+echo "$0: INFO To initialize run initialization scripts:"
+echo "$0: INFO"
+echo "$0: INFO apply_yaml.sh $TARGET_DIR/init1.yml"
+echo "$0: INFO apply_yaml.sh $TARGET_DIR/init2.yml"
+echo "$0: INFO apply_yaml.sh $TARGET_DIR/init3.yml"
+echo "$0: INFO"
+echo "$0: INFO During the test, run the test steps:"
+echo "$0: INFO"
+echo "$0: INFO apply_yaml.sh $TARGET_DIR/step1.yml"
+echo "$0: INFO apply_yaml.sh $TARGET_DIR/step2.yml"
+echo "$0: INFO"
+echo "$0: INFO To reset to initial state (e.g. 
before running a demo/test again):" +echo "$0: INFO" +echo "$0: INFO apply_yaml.sh $TARGET_DIR/reset.yml" From 2587758bc5b19caf39282b8bccae116d08e90f4b Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Mon, 8 Oct 2018 13:11:19 +0200 Subject: [PATCH 59/93] Make Queries in parallel --- bq-metrics-extractor/build.gradle | 6 +- .../cronjob/extractor-dev.yaml | 2 +- .../BqMetricsExtractorApplication.kt | 74 ++++++++++++++++--- build.gradle | 1 + 4 files changed, 71 insertions(+), 12 deletions(-) diff --git a/bq-metrics-extractor/build.gradle b/bq-metrics-extractor/build.gradle index 63f260c4e..ba65c59fe 100644 --- a/bq-metrics-extractor/build.gradle +++ b/bq-metrics-extractor/build.gradle @@ -1,3 +1,7 @@ +buildscript { + ext.kotlin_version = "$kotlinVersion" +} + plugins { id "org.jetbrains.kotlin.jvm" version "1.2.71" id "application" @@ -11,6 +15,7 @@ dependencies { implementation "io.dropwizard:dropwizard-core:$dropwizardVersion" implementation "io.dropwizard:dropwizard-client:$dropwizardVersion" + implementation "org.jetbrains.kotlinx:kotlinx-coroutines-core:$kotlinXCoroutinesVersion" implementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion" @@ -19,7 +24,6 @@ dependencies { runtimeOnly "io.dropwizard:dropwizard-json-logging:$dropwizardVersion" - testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" testImplementation "org.mockito:mockito-core:$mockitoVersion" testImplementation "org.jetbrains.kotlin:kotlin-test:$kotlinVersion" diff --git a/bq-metrics-extractor/cronjob/extractor-dev.yaml b/bq-metrics-extractor/cronjob/extractor-dev.yaml index 70755976d..85af0b704 100644 --- a/bq-metrics-extractor/cronjob/extractor-dev.yaml +++ b/bq-metrics-extractor/cronjob/extractor-dev.yaml @@ -3,7 +3,7 @@ kind: CronJob metadata: name: bq-metrics-extractor spec: - schedule: "*/30 * * * *" + schedule: "*/1 * * * *" jobTemplate: spec: template: diff --git 
a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt index cb165f1bf..e6cc497fb 100644 --- a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt +++ b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt @@ -3,11 +3,8 @@ package org.ostelco.bqmetrics import com.fasterxml.jackson.annotation.JsonProperty import com.google.cloud.RetryOption -import com.google.cloud.bigquery.BigQueryOptions -import com.google.cloud.bigquery.Job -import com.google.cloud.bigquery.JobId -import com.google.cloud.bigquery.JobInfo -import com.google.cloud.bigquery.QueryJobConfiguration +import com.google.cloud.bigquery.* +import com.google.cloud.bigquery.Job as BQJob import io.dropwizard.Application import io.dropwizard.Configuration import io.dropwizard.cli.ConfiguredCommand @@ -17,6 +14,9 @@ import io.prometheus.client.CollectorRegistry import io.prometheus.client.Gauge import io.prometheus.client.Summary import io.prometheus.client.exporter.PushGateway +import kotlinx.coroutines.experimental.launch +import kotlinx.coroutines.experimental.runBlocking +import kotlinx.coroutines.experimental.Job import net.sourceforge.argparse4j.inf.Namespace import net.sourceforge.argparse4j.inf.Subparser import org.slf4j.Logger @@ -204,7 +204,7 @@ abstract class MetricBuilder( // Create a job ID so that we can safely retry. val jobId: JobId = JobId.of(UUID.randomUUID().toString()); - var queryJob: Job = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build()); + var queryJob: BQJob = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build()); // Wait for the query to complete. // Retry maximum 4 times for up to 2 minutes. 
@@ -304,7 +304,7 @@ private class BqMetricsExtractionException : RuntimeException { /** * Adapter class that will push metrics to the Prometheus push gateway. */ -private class PrometheusPusher(val pushGateway: String, val job: String) { +private class PrometheusPusher(val pushGateway: String, val jobName: String) { private val log: Logger = LoggerFactory.getLogger(PrometheusPusher::class.java) @@ -312,7 +312,11 @@ private class PrometheusPusher(val pushGateway: String, val job: String) { val env: EnvironmentVars = EnvironmentVars() fun publishMetrics(metrics: List) { + publishMetricsAsync(metrics) + publishMetricsSync(metrics) + } + fun publishMetricsAsync(metrics: List) = runBlocking { val metricSources: MutableList = mutableListOf() metrics.forEach { val typeString: String = it.type.trim().toUpperCase() @@ -340,14 +344,64 @@ private class PrometheusPusher(val pushGateway: String, val job: String) { } log.info("Querying bigquery for metric values") + val jobs = mutableListOf() + val start = System.currentTimeMillis() val pg = PushGateway(pushGateway) - metricSources.forEach({ it.buildMetric(registry) }) + metricSources.forEach { builder -> + jobs += launch { + builder.buildMetric(registry) + } + } + // Wait for the SQL queries to finish. 
+ jobs.forEach { it.join() } + val end = System.currentTimeMillis() + log.info("Queries finished in ${end - start} ms") log.info("Pushing metrics to pushgateway") - pg.pushAdd(registry, job) + pg.pushAdd(registry, jobName) log.info("Done transmitting metrics to pushgateway") } -} + + fun publishMetricsSync(metrics: List) { + val metricSources: MutableList = mutableListOf() + metrics.forEach { + val typeString: String = it.type.trim().toUpperCase() + when (typeString) { + "SUMMARY" -> { + metricSources.add(SummaryMetricBuilder( + it.name, + it.help, + it.sql, + it.resultColumn, + env)) + } + "GAUGE" -> { + metricSources.add(GaugeMetricBuilder( + it.name, + it.help, + it.sql, + it.resultColumn, + env)) + } + else -> { + log.error("Unknown metrics type '${it.type}'") + } + } + } + + log.info("Querying bigquery for metric values") + val start = System.currentTimeMillis() + val pg = PushGateway(pushGateway) + metricSources.forEach { builder -> + builder.buildMetric(registry) + } + val end = System.currentTimeMillis() + log.info("Queries finished in ${end - start} ms") + + log.info("Pushing metrics to pushgateway") + pg.pushAdd(registry, jobName) + log.info("Done transmitting metrics to pushgateway") + }} private class CollectAndPushMetrics : ConfiguredCommand( "query", diff --git a/build.gradle b/build.gradle index 0211da753..d765b6b68 100644 --- a/build.gradle +++ b/build.gradle @@ -34,6 +34,7 @@ subprojects { ext { kotlinVersion = "1.2.71" dropwizardVersion = "1.3.5" + kotlinXCoroutinesVersion = "0.30.0" googleCloudVersion = "1.46.0" jacksonVersion = "2.9.7" stripeVersion = "7.0.0" From d00d9056ef062b7978c31bedac132646f2c2051d Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Mon, 8 Oct 2018 14:07:16 +0200 Subject: [PATCH 60/93] Remove unused functions --- .../BqMetricsExtractorApplication.kt | 60 +++---------------- 1 file changed, 9 insertions(+), 51 deletions(-) diff --git 
a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt index e6cc497fb..cd21e2684 100644 --- a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt +++ b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt @@ -3,8 +3,10 @@ package org.ostelco.bqmetrics import com.fasterxml.jackson.annotation.JsonProperty import com.google.cloud.RetryOption -import com.google.cloud.bigquery.* -import com.google.cloud.bigquery.Job as BQJob +import com.google.cloud.bigquery.BigQueryOptions +import com.google.cloud.bigquery.JobId +import com.google.cloud.bigquery.JobInfo +import com.google.cloud.bigquery.QueryJobConfiguration import io.dropwizard.Application import io.dropwizard.Configuration import io.dropwizard.cli.ConfiguredCommand @@ -14,9 +16,9 @@ import io.prometheus.client.CollectorRegistry import io.prometheus.client.Gauge import io.prometheus.client.Summary import io.prometheus.client.exporter.PushGateway +import kotlinx.coroutines.experimental.Job import kotlinx.coroutines.experimental.launch import kotlinx.coroutines.experimental.runBlocking -import kotlinx.coroutines.experimental.Job import net.sourceforge.argparse4j.inf.Namespace import net.sourceforge.argparse4j.inf.Subparser import org.slf4j.Logger @@ -25,6 +27,7 @@ import org.threeten.bp.Duration import java.util.* import javax.validation.Valid import javax.validation.constraints.NotNull +import com.google.cloud.bigquery.Job as BQJob /** * Bridge between "latent metrics" stored in BigQuery and Prometheus @@ -311,12 +314,7 @@ private class PrometheusPusher(val pushGateway: String, val jobName: String) { val registry = CollectorRegistry() val env: EnvironmentVars = EnvironmentVars() - fun publishMetrics(metrics: List) { - publishMetricsAsync(metrics) - publishMetricsSync(metrics) - } - - fun 
publishMetricsAsync(metrics: List) = runBlocking { + fun publishMetrics(metrics: List) = runBlocking { val metricSources: MutableList = mutableListOf() metrics.forEach { val typeString: String = it.type.trim().toUpperCase() @@ -355,53 +353,13 @@ private class PrometheusPusher(val pushGateway: String, val jobName: String) { // Wait for the SQL queries to finish. jobs.forEach { it.join() } val end = System.currentTimeMillis() - log.info("Queries finished in ${end - start} ms") + log.info("Queries finished in ${(end - start)/1000} seconds") log.info("Pushing metrics to pushgateway") pg.pushAdd(registry, jobName) log.info("Done transmitting metrics to pushgateway") } - - fun publishMetricsSync(metrics: List) { - val metricSources: MutableList = mutableListOf() - metrics.forEach { - val typeString: String = it.type.trim().toUpperCase() - when (typeString) { - "SUMMARY" -> { - metricSources.add(SummaryMetricBuilder( - it.name, - it.help, - it.sql, - it.resultColumn, - env)) - } - "GAUGE" -> { - metricSources.add(GaugeMetricBuilder( - it.name, - it.help, - it.sql, - it.resultColumn, - env)) - } - else -> { - log.error("Unknown metrics type '${it.type}'") - } - } - } - - log.info("Querying bigquery for metric values") - val start = System.currentTimeMillis() - val pg = PushGateway(pushGateway) - metricSources.forEach { builder -> - builder.buildMetric(registry) - } - val end = System.currentTimeMillis() - log.info("Queries finished in ${end - start} ms") - - log.info("Pushing metrics to pushgateway") - pg.pushAdd(registry, jobName) - log.info("Done transmitting metrics to pushgateway") - }} +} private class CollectAndPushMetrics : ConfiguredCommand( "query", From 700c84e36d6d6aece4799675f8180a521bbeece5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Mon, 8 Oct 2018 16:23:18 +0200 Subject: [PATCH 61/93] Adding gitignore --- sample-agent/.gitignore | 1 + 1 file changed, 1 insertion(+) create mode 100644 sample-agent/.gitignore diff --git 
a/sample-agent/.gitignore b/sample-agent/.gitignore new file mode 100644 index 000000000..1ee8128db --- /dev/null +++ b/sample-agent/.gitignore @@ -0,0 +1 @@ +demo-scripts From 58af03ad93caa240f73c4e061bb9bd1db864a97c Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Mon, 8 Oct 2018 16:31:27 +0200 Subject: [PATCH 62/93] Use async --- .../BqMetricsExtractorApplication.kt | 43 ++++++++++--------- 1 file changed, 23 insertions(+), 20 deletions(-) diff --git a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt index cd21e2684..039149188 100644 --- a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt +++ b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt @@ -16,9 +16,7 @@ import io.prometheus.client.CollectorRegistry import io.prometheus.client.Gauge import io.prometheus.client.Summary import io.prometheus.client.exporter.PushGateway -import kotlinx.coroutines.experimental.Job -import kotlinx.coroutines.experimental.launch -import kotlinx.coroutines.experimental.runBlocking +import kotlinx.coroutines.experimental.* import net.sourceforge.argparse4j.inf.Namespace import net.sourceforge.argparse4j.inf.Subparser import org.slf4j.Logger @@ -168,10 +166,12 @@ abstract class MetricBuilder( val sql: String, val resultColumn: String, val env: EnvironmentVars) { + private val log: Logger = LoggerFactory.getLogger(MetricBuilder::class.java) + /** * Function which will add the current value of the metric to registry. */ - abstract fun buildMetric(registry: CollectorRegistry) + abstract suspend fun buildMetric(registry: CollectorRegistry) /** * Function to expand the environment variables in the SQL. 
@@ -209,6 +209,7 @@ abstract class MetricBuilder( val jobId: JobId = JobId.of(UUID.randomUUID().toString()); var queryJob: BQJob = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build()); + log.info("Waiting for $metricName Query") // Wait for the query to complete. // Retry maximum 4 times for up to 2 minutes. queryJob = queryJob.waitFor( @@ -217,6 +218,7 @@ abstract class MetricBuilder( RetryOption.maxRetryDelay(Duration.ofSeconds(20)), RetryOption.maxAttempts(5), RetryOption.totalTimeout(Duration.ofMinutes(2))); + log.info("Finished waiting for $metricName Query") // Check for errors if (queryJob == null) { @@ -249,15 +251,14 @@ class SummaryMetricBuilder( private val log: Logger = LoggerFactory.getLogger(SummaryMetricBuilder::class.java) - override fun buildMetric(registry: CollectorRegistry) { + override suspend fun buildMetric(registry: CollectorRegistry) = coroutineScope { try { val summary: Summary = Summary.build() .name(metricName) .help(help).register(registry) - val value: Long = getNumberValueViaSql() - + log.info("Fetch async Summarizing metric $metricName") + val value: Long = async { getNumberValueViaSql() }.await() log.info("Summarizing metric $metricName to be $value") - summary.observe(value * 1.0) } catch (e: NullPointerException) { log.error(e.toString()) @@ -277,15 +278,14 @@ class GaugeMetricBuilder( private val log: Logger = LoggerFactory.getLogger(GaugeMetricBuilder::class.java) - override fun buildMetric(registry: CollectorRegistry) { + override suspend fun buildMetric(registry: CollectorRegistry) = coroutineScope { try { val gauge: Gauge = Gauge.build() .name(metricName) .help(help).register(registry) - val value: Long = getNumberValueViaSql() - + log.info("Fetch async Gauge metric $metricName") + val value: Long = async { getNumberValueViaSql() }.await() log.info("Gauge metric $metricName = $value") - gauge.set(value * 1.0) } catch (e: NullPointerException) { log.error(e.toString()) @@ -314,7 +314,7 @@ private class 
PrometheusPusher(val pushGateway: String, val jobName: String) { val registry = CollectorRegistry() val env: EnvironmentVars = EnvironmentVars() - fun publishMetrics(metrics: List) = runBlocking { + suspend fun publishMetrics(metrics: List) = coroutineScope { val metricSources: MutableList = mutableListOf() metrics.forEach { val typeString: String = it.type.trim().toUpperCase() @@ -342,16 +342,18 @@ private class PrometheusPusher(val pushGateway: String, val jobName: String) { } log.info("Querying bigquery for metric values") - val jobs = mutableListOf() val start = System.currentTimeMillis() val pg = PushGateway(pushGateway) - metricSources.forEach { builder -> - jobs += launch { - builder.buildMetric(registry) + log.info("Starting ${metricSources.size} Queries") + coroutineScope { + metricSources.forEach { builder -> + launch { + builder.buildMetric(registry) + } } } // Wait for the SQL queries to finish. - jobs.forEach { it.join() } + log.info("Started ${metricSources.size} Queries") val end = System.currentTimeMillis() log.info("Queries finished in ${(end - start)/1000} seconds") @@ -376,8 +378,9 @@ private class CollectAndPushMetrics : ConfiguredCommand(pushgatewayKey) - PrometheusPusher(pgw, - "bq_metrics_extractor").publishMetrics(configuration.metrics) + runBlocking { + PrometheusPusher(pgw, "bq_metrics_extractor").publishMetrics(configuration.metrics) + } } val pushgatewayKey = "pushgateway" From 793ea88db6f6d96df4a764817878b0ef41e31db9 Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Mon, 8 Oct 2018 16:37:10 +0200 Subject: [PATCH 63/93] Move async up the stack --- .../bqmetrics/BqMetricsExtractorApplication.kt | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt index 039149188..a9221b11d 100644 --- 
a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt +++ b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt @@ -171,7 +171,7 @@ abstract class MetricBuilder( /** * Function which will add the current value of the metric to registry. */ - abstract suspend fun buildMetric(registry: CollectorRegistry) + abstract fun buildMetric(registry: CollectorRegistry) /** * Function to expand the environment variables in the SQL. @@ -251,13 +251,13 @@ class SummaryMetricBuilder( private val log: Logger = LoggerFactory.getLogger(SummaryMetricBuilder::class.java) - override suspend fun buildMetric(registry: CollectorRegistry) = coroutineScope { + override fun buildMetric(registry: CollectorRegistry) { try { val summary: Summary = Summary.build() .name(metricName) .help(help).register(registry) log.info("Fetch async Summarizing metric $metricName") - val value: Long = async { getNumberValueViaSql() }.await() + val value: Long = getNumberValueViaSql() log.info("Summarizing metric $metricName to be $value") summary.observe(value * 1.0) } catch (e: NullPointerException) { @@ -278,13 +278,13 @@ class GaugeMetricBuilder( private val log: Logger = LoggerFactory.getLogger(GaugeMetricBuilder::class.java) - override suspend fun buildMetric(registry: CollectorRegistry) = coroutineScope { + override fun buildMetric(registry: CollectorRegistry) { try { val gauge: Gauge = Gauge.build() .name(metricName) .help(help).register(registry) log.info("Fetch async Gauge metric $metricName") - val value: Long = async { getNumberValueViaSql() }.await() + val value: Long = getNumberValueViaSql() log.info("Gauge metric $metricName = $value") gauge.set(value * 1.0) } catch (e: NullPointerException) { @@ -314,7 +314,7 @@ private class PrometheusPusher(val pushGateway: String, val jobName: String) { val registry = CollectorRegistry() val env: EnvironmentVars = EnvironmentVars() - suspend fun publishMetrics(metrics: List) = 
coroutineScope { + suspend fun publishMetrics(metrics: List) { val metricSources: MutableList = mutableListOf() metrics.forEach { val typeString: String = it.type.trim().toUpperCase() @@ -347,7 +347,9 @@ private class PrometheusPusher(val pushGateway: String, val jobName: String) { log.info("Starting ${metricSources.size} Queries") coroutineScope { metricSources.forEach { builder -> - launch { + log.info("Queue metric ${builder.metricName}") + async { + log.info("Starting fetch async metric ${builder.metricName}") builder.buildMetric(registry) } } From 93cf012a6c539c84829582fe4716a59ae6e721dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Mon, 8 Oct 2018 16:43:32 +0200 Subject: [PATCH 64/93] Fixing bugs --- sample-agent/apply-yaml.sh | 10 +++++++--- sample-agent/demo-scripts/init1.yml | 21 --------------------- sample-agent/demo-scripts/init2.yml | 17 ----------------- sample-agent/demo-scripts/init3.yml | 17 ----------------- sample-agent/demo-scripts/reset.yml | 9 --------- sample-agent/demo-scripts/step1.yml | 12 ------------ sample-agent/demo-scripts/step2.yml | 11 ----------- sample-agent/generate-test-scripts.sh | 4 ---- 8 files changed, 7 insertions(+), 94 deletions(-) delete mode 100644 sample-agent/demo-scripts/init1.yml delete mode 100644 sample-agent/demo-scripts/init2.yml delete mode 100644 sample-agent/demo-scripts/init3.yml delete mode 100644 sample-agent/demo-scripts/reset.yml delete mode 100644 sample-agent/demo-scripts/step1.yml delete mode 100644 sample-agent/demo-scripts/step2.yml diff --git a/sample-agent/apply-yaml.sh b/sample-agent/apply-yaml.sh index 0536ff0f8..9608ee390 100755 --- a/sample-agent/apply-yaml.sh +++ b/sample-agent/apply-yaml.sh @@ -67,7 +67,7 @@ fi ## EXPECTED_FROM_GET_TO_IMPORT='{"code":405,"message":"HTTP 405 Method Not Allowed"}' -RESULT_FROM_GET_PROBE="$(curl http://127.0.0.1:8080/importer 2>/dev/null)" +RESULT_FROM_GET_PROBE="$(curl http://127.0.0.1:8080/import/offer 2>/dev/null)" if [[ 
"$EXPECTED_FROM_GET_TO_IMPORT" != "$RESULT_FROM_GET_PROBE" ]] ; then echo "$0 ERROR: Did not get expected result when probing importer, bailing out" @@ -83,5 +83,9 @@ fi ## Send it to the importer ## (assuming the kubectl port forwarding is enabled) -IMPORTER_URL=http://127.0.0.1:8080/importer -curl -H "Content-type: text/vnd.yaml" --data-binary @$YAML_SCRIPTNAME $IMPORTER_URL +# SEGMENT_IMPORTER_URL=http://127.0.0.1:8080/import/segments +# curl -X PUT -H "Content-type: text/vnd.yaml" --data-binary @$YAML_SCRIPTNAME $SEGMENT_IMPORTER_URL + + +IMPORTER_URL=http://127.0.0.1:8080/import/offer +curl -X POST -H "Content-type: text/vnd.yaml" --data-binary @$YAML_SCRIPTNAME $IMPORTER_URL diff --git a/sample-agent/demo-scripts/init1.yml b/sample-agent/demo-scripts/init1.yml deleted file mode 100644 index 6165b5ec0..000000000 --- a/sample-agent/demo-scripts/init1.yml +++ /dev/null @@ -1,21 +0,0 @@ -createOffer: - id: demoOffer1 - createProducts: - - sku: 1GB_200NOK - price: - amount: 200 - currency: NOK - properties: - noOfBytes: 1_000_000_000 - presentation: - isDefault: true - offerLabel: Top Up - priceLabel: 200 NOK - createSegments: - - id: demoSegment1 - subscribers: - - foo@baz.com - - bar@baz.com - - - diff --git a/sample-agent/demo-scripts/init2.yml b/sample-agent/demo-scripts/init2.yml deleted file mode 100644 index da27c8148..000000000 --- a/sample-agent/demo-scripts/init2.yml +++ /dev/null @@ -1,17 +0,0 @@ -createOffer: - id: demoOffer2 - createProducts: - - sku: 2GB_200NOK - price: - amount: 200 - currency: NOK - properties: - noOfBytes: 2_000_000_000 - presentation: - isDefault: true - offerLabel: Top Up - priceLabel: 200 NOK - createSegments: - - id: demoSegment2 - - diff --git a/sample-agent/demo-scripts/init3.yml b/sample-agent/demo-scripts/init3.yml deleted file mode 100644 index 9157fe876..000000000 --- a/sample-agent/demo-scripts/init3.yml +++ /dev/null @@ -1,17 +0,0 @@ -createOffer: - id: demoOffer3 - createProducts: - - sku: 1GB_50NOK - price: - amount: 
50 - currency: NOK - properties: - noOfBytes: 1_000_000_000 - presentation: - isDefault: true - offerLabel: Special offer - priceLabel: 50 NOK - createSegments: - - id: demoSegment3 - subscribers: - - bar@baz.com diff --git a/sample-agent/demo-scripts/reset.yml b/sample-agent/demo-scripts/reset.yml deleted file mode 100644 index 99f5f1c5e..000000000 --- a/sample-agent/demo-scripts/reset.yml +++ /dev/null @@ -1,9 +0,0 @@ -updateSegments: - - id: s1 - subscribers: - - u1 - - u2 - - id: s2 - subscribers: - - id: s3 - subscribers: diff --git a/sample-agent/demo-scripts/step1.yml b/sample-agent/demo-scripts/step1.yml deleted file mode 100644 index 2506fea00..000000000 --- a/sample-agent/demo-scripts/step1.yml +++ /dev/null @@ -1,12 +0,0 @@ -updateSegments: - - id: s1 - subscribers: - - u1 - - id: s2 - subscribers: - - u2 - - id: s3 - subscribers: - - - diff --git a/sample-agent/demo-scripts/step2.yml b/sample-agent/demo-scripts/step2.yml deleted file mode 100644 index 846882a1a..000000000 --- a/sample-agent/demo-scripts/step2.yml +++ /dev/null @@ -1,11 +0,0 @@ -updateSegments: - - id: s1 - subscribers: - - u1 - - id: s2 - subscribers: - - u2 - - id: s3 - subscribers: - - u2 - diff --git a/sample-agent/generate-test-scripts.sh b/sample-agent/generate-test-scripts.sh index 055cea2d5..8868b34e4 100755 --- a/sample-agent/generate-test-scripts.sh +++ b/sample-agent/generate-test-scripts.sh @@ -53,8 +53,6 @@ createOffer: createSegments: - id: $SEGMENT_1 subscribers: - - $USER_1 - - $USER_2 EOF @@ -93,8 +91,6 @@ createOffer: createSegments: - id: $SEGMENT_3 subscribers: - - $USER_2 - EOF cat > $TARGET_DIR/step1.yml < Date: Mon, 8 Oct 2018 16:44:59 +0200 Subject: [PATCH 65/93] Removing segments altogether from the initial segments --- sample-agent/generate-test-scripts.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/sample-agent/generate-test-scripts.sh b/sample-agent/generate-test-scripts.sh index 8868b34e4..97f3e8396 100755 --- a/sample-agent/generate-test-scripts.sh 
+++ b/sample-agent/generate-test-scripts.sh @@ -52,7 +52,6 @@ createOffer: priceLabel: 200 NOK createSegments: - id: $SEGMENT_1 - subscribers: EOF @@ -90,7 +89,6 @@ createOffer: priceLabel: 50 NOK createSegments: - id: $SEGMENT_3 - subscribers: EOF cat > $TARGET_DIR/step1.yml < Date: Mon, 8 Oct 2018 17:04:39 +0200 Subject: [PATCH 66/93] Fixing misc issues --- sample-agent/apply-yaml.sh | 26 +++++++++++++++++++------- sample-agent/generate-test-scripts.sh | 16 +++++++--------- 2 files changed, 26 insertions(+), 16 deletions(-) diff --git a/sample-agent/apply-yaml.sh b/sample-agent/apply-yaml.sh index 9608ee390..8884ed61a 100755 --- a/sample-agent/apply-yaml.sh +++ b/sample-agent/apply-yaml.sh @@ -11,7 +11,15 @@ set -e # file containing a yaml file. # -YAML_SCRIPTNAME=$1 +IMPORT_TYPE=$1 + +if [[ "$IMPORT_TYPE" != "offer" && "$IMPORT_TYPE" != "segments" ]] ; then + echo "$0: ERROR Import type must be 'offer' or 'segments'" + echo "usage $0 {offer,segment} yaml-script" + exit 1 +fi + +YAML_SCRIPTNAME=$2 if [[ -z "$YAML_SCRIPTNAME" ]] ; then echo "$0 Missing script" echo "usage $0 yaml-script" @@ -78,14 +86,18 @@ if [[ "$EXPECTED_FROM_GET_TO_IMPORT" != "$RESULT_FROM_GET_PROBE" ]] ; then exit 1 fi - ## ## Send it to the importer ## (assuming the kubectl port forwarding is enabled) -# SEGMENT_IMPORTER_URL=http://127.0.0.1:8080/import/segments -# curl -X PUT -H "Content-type: text/vnd.yaml" --data-binary @$YAML_SCRIPTNAME $SEGMENT_IMPORTER_URL - +if [[ "$IMPORT_TYPE" = "segments" ]] ; then + SEGMENT_IMPORTER_URL=http://127.0.0.1:8080/import/segments + curl -X PUT -H "Content-type: text/vnd.yaml" --data-binary @$YAML_SCRIPTNAME $SEGMENT_IMPORTER_URL + exit 0 +fi -IMPORTER_URL=http://127.0.0.1:8080/import/offer -curl -X POST -H "Content-type: text/vnd.yaml" --data-binary @$YAML_SCRIPTNAME $IMPORTER_URL +if [[ "$IMPORT_TYPE" = "offer" ]] ; then + IMPORTER_URL=http://127.0.0.1:8080/import/offer + curl -X POST -H "Content-type: text/vnd.yaml" --data-binary @$YAML_SCRIPTNAME 
$IMPORTER_URL + exit 0 +fi diff --git a/sample-agent/generate-test-scripts.sh b/sample-agent/generate-test-scripts.sh index 97f3e8396..05d20e34c 100755 --- a/sample-agent/generate-test-scripts.sh +++ b/sample-agent/generate-test-scripts.sh @@ -120,28 +120,26 @@ EOF cat > $TARGET_DIR/reset.yml < Date: Mon, 8 Oct 2018 17:16:41 +0200 Subject: [PATCH 67/93] Correct usage of concurrency using coroutines --- .../BqMetricsExtractorApplication.kt | 35 ++++++++----------- 1 file changed, 15 insertions(+), 20 deletions(-) diff --git a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt index a9221b11d..b78b71314 100644 --- a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt +++ b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt @@ -171,7 +171,7 @@ abstract class MetricBuilder( /** * Function which will add the current value of the metric to registry. */ - abstract fun buildMetric(registry: CollectorRegistry) + abstract suspend fun buildMetric(registry: CollectorRegistry) /** * Function to expand the environment variables in the SQL. @@ -194,7 +194,7 @@ abstract class MetricBuilder( /** * Execute the SQL and get a single number value. */ - fun getNumberValueViaSql(): Long { + suspend fun getNumberValueViaSql(): Long = coroutineScope { // Instantiate a client. If you don't specify credentials when constructing a client, the // client library will look for credentials in the environment, such as the // GOOGLE_APPLICATION_CREDENTIALS environment variable. @@ -212,12 +212,14 @@ abstract class MetricBuilder( log.info("Waiting for $metricName Query") // Wait for the query to complete. // Retry maximum 4 times for up to 2 minutes. 
- queryJob = queryJob.waitFor( - RetryOption.initialRetryDelay(Duration.ofSeconds(10)), - RetryOption.retryDelayMultiplier(2.0), - RetryOption.maxRetryDelay(Duration.ofSeconds(20)), - RetryOption.maxAttempts(5), - RetryOption.totalTimeout(Duration.ofMinutes(2))); + queryJob = async { + queryJob.waitFor( + RetryOption.initialRetryDelay(Duration.ofSeconds(10)), + RetryOption.retryDelayMultiplier(2.0), + RetryOption.maxRetryDelay(Duration.ofSeconds(20)), + RetryOption.maxAttempts(5), + RetryOption.totalTimeout(Duration.ofMinutes(2))); + }.await() log.info("Finished waiting for $metricName Query") // Check for errors @@ -234,7 +236,7 @@ abstract class MetricBuilder( } val count = result.iterateAll().iterator().next().get(resultColumn).longValue - return count + count } } @@ -251,14 +253,12 @@ class SummaryMetricBuilder( private val log: Logger = LoggerFactory.getLogger(SummaryMetricBuilder::class.java) - override fun buildMetric(registry: CollectorRegistry) { + override suspend fun buildMetric(registry: CollectorRegistry) { try { val summary: Summary = Summary.build() .name(metricName) .help(help).register(registry) - log.info("Fetch async Summarizing metric $metricName") val value: Long = getNumberValueViaSql() - log.info("Summarizing metric $metricName to be $value") summary.observe(value * 1.0) } catch (e: NullPointerException) { log.error(e.toString()) @@ -278,14 +278,12 @@ class GaugeMetricBuilder( private val log: Logger = LoggerFactory.getLogger(GaugeMetricBuilder::class.java) - override fun buildMetric(registry: CollectorRegistry) { + override suspend fun buildMetric(registry: CollectorRegistry) { try { val gauge: Gauge = Gauge.build() .name(metricName) .help(help).register(registry) - log.info("Fetch async Gauge metric $metricName") val value: Long = getNumberValueViaSql() - log.info("Gauge metric $metricName = $value") gauge.set(value * 1.0) } catch (e: NullPointerException) { log.error(e.toString()) @@ -347,15 +345,12 @@ private class PrometheusPusher(val 
pushGateway: String, val jobName: String) { log.info("Starting ${metricSources.size} Queries") coroutineScope { metricSources.forEach { builder -> - log.info("Queue metric ${builder.metricName}") - async { - log.info("Starting fetch async metric ${builder.metricName}") + launch { builder.buildMetric(registry) } } } - // Wait for the SQL queries to finish. - log.info("Started ${metricSources.size} Queries") + // coroutineScope waits for all children to finish. val end = System.currentTimeMillis() log.info("Queries finished in ${(end - start)/1000} seconds") From 4796b5c5362444acfdc2642a8f6e6682f4c2ac35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Mon, 8 Oct 2018 17:17:11 +0200 Subject: [PATCH 68/93] Making it work --- sample-agent/apply-yaml.sh | 2 +- sample-agent/generate-test-scripts.sh | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/sample-agent/apply-yaml.sh b/sample-agent/apply-yaml.sh index 8884ed61a..8c88ff058 100755 --- a/sample-agent/apply-yaml.sh +++ b/sample-agent/apply-yaml.sh @@ -1,4 +1,4 @@ -#!/bin/bash -x +#!/bin/bash set -e diff --git a/sample-agent/generate-test-scripts.sh b/sample-agent/generate-test-scripts.sh index 05d20e34c..4b47c29cf 100755 --- a/sample-agent/generate-test-scripts.sh +++ b/sample-agent/generate-test-scripts.sh @@ -100,13 +100,12 @@ updateSegments: subscribers: - $USER_2 - id: $SEGMENT_3 - subscribers: EOF cat > $TARGET_DIR/step2.yml < Date: Mon, 8 Oct 2018 17:34:29 +0200 Subject: [PATCH 69/93] Revert back to original schedule --- bq-metrics-extractor/cronjob/extractor-dev.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bq-metrics-extractor/cronjob/extractor-dev.yaml b/bq-metrics-extractor/cronjob/extractor-dev.yaml index 85af0b704..70755976d 100644 --- a/bq-metrics-extractor/cronjob/extractor-dev.yaml +++ b/bq-metrics-extractor/cronjob/extractor-dev.yaml @@ -3,7 +3,7 @@ kind: CronJob metadata: name: bq-metrics-extractor spec: - schedule: "*/1 * * * *" + 
schedule: "*/30 * * * *" jobTemplate: spec: template: From a3d0af8dc904632e59012bae1208c25aa59a59c5 Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Sun, 7 Oct 2018 22:30:17 +0200 Subject: [PATCH 70/93] Updated dependency versions --- acceptance-tests/build.gradle | 2 +- admin-api/build.gradle | 3 +++ analytics-module/build.gradle | 2 +- auth-server/build.gradle | 6 +++--- bq-metrics-extractor/build.gradle | 2 +- build.gradle | 14 ++++++++------ client-api/build.gradle | 4 ++-- dataflow-pipelines/build.gradle | 2 +- ext-auth-provider/build.gradle | 6 +++--- ocsgw/build.gradle | 6 +++--- prime/build.gradle | 2 +- slack/build.gradle | 4 ++-- tools/neo4j-admin-tools/build.gradle | 2 +- 13 files changed, 30 insertions(+), 25 deletions(-) diff --git a/acceptance-tests/build.gradle b/acceptance-tests/build.gradle index 6c934663e..8ee2826c8 100644 --- a/acceptance-tests/build.gradle +++ b/acceptance-tests/build.gradle @@ -1,7 +1,7 @@ plugins { id "org.jetbrains.kotlin.jvm" version "1.2.71" id "application" - id "com.github.johnrengelman.shadow" version "4.0.0" + id "com.github.johnrengelman.shadow" version "4.0.1" } dependencies { diff --git a/admin-api/build.gradle b/admin-api/build.gradle index 8eb890594..33ebfeb53 100644 --- a/admin-api/build.gradle +++ b/admin-api/build.gradle @@ -6,6 +6,9 @@ plugins { dependencies { implementation project(":prime-modules") + implementation "javax.xml.bind:jaxb-api:$jaxbVersion" + implementation "javax.activation:activation:$javaxActivationVersion" + testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" } diff --git a/analytics-module/build.gradle b/analytics-module/build.gradle index b02d49302..2ecf5b533 100644 --- a/analytics-module/build.gradle +++ b/analytics-module/build.gradle @@ -11,7 +11,7 @@ dependencies { implementation "com.google.cloud:google-cloud-pubsub:$googleCloudVersion" implementation 'com.google.code.gson:gson:2.8.5' - testImplementation 'com.google.api:gax-grpc:1.32.0' + testImplementation 
'com.google.api:gax-grpc:1.33.0' testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" testImplementation "org.mockito:mockito-core:$mockitoVersion" diff --git a/auth-server/build.gradle b/auth-server/build.gradle index 39c560193..6b2068029 100644 --- a/auth-server/build.gradle +++ b/auth-server/build.gradle @@ -1,7 +1,7 @@ plugins { id "org.jetbrains.kotlin.jvm" version "1.2.71" id "application" - id "com.github.johnrengelman.shadow" version "4.0.0" + id "com.github.johnrengelman.shadow" version "4.0.1" id "idea" } @@ -12,8 +12,8 @@ dependencies { implementation project(":firebase-extensions") implementation "com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion" - implementation 'javax.xml.bind:jaxb-api:2.3.0' - implementation 'javax.activation:activation:1.1.1' + implementation "javax.xml.bind:jaxb-api:$jaxbVersion" + implementation "javax.activation:activation:$javaxActivationVersion" testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" testImplementation "org.jetbrains.kotlin:kotlin-test-junit:$kotlinVersion" diff --git a/bq-metrics-extractor/build.gradle b/bq-metrics-extractor/build.gradle index 63f260c4e..cdea9bfc7 100644 --- a/bq-metrics-extractor/build.gradle +++ b/bq-metrics-extractor/build.gradle @@ -1,7 +1,7 @@ plugins { id "org.jetbrains.kotlin.jvm" version "1.2.71" id "application" - id "com.github.johnrengelman.shadow" version "4.0.0" + id "com.github.johnrengelman.shadow" version "4.0.1" id "idea" } diff --git a/build.gradle b/build.gradle index 0211da753..6cf180eb9 100644 --- a/build.gradle +++ b/build.gradle @@ -33,19 +33,21 @@ subprojects { } ext { kotlinVersion = "1.2.71" - dropwizardVersion = "1.3.5" - googleCloudVersion = "1.46.0" + dropwizardVersion = "1.3.7" + googleCloudVersion = "1.48.0" jacksonVersion = "2.9.7" stripeVersion = "7.0.0" guavaVersion = "26.0-jre" junit5Version = "5.3.1" assertJVersion = "3.11.1" - mockitoVersion = "2.22.0" + mockitoVersion = "2.23.0" firebaseVersion = 
"6.5.0" beamVersion = "2.7.0" - // Keeping it version 1.15.0 to be consistent with grpc via PubSub client lib - // Keeping it version 1.15.0 to be consistent with netty via Firebase lib - grpcVersion = "1.15.0" + // Keeping it version 1.15.1 to be consistent with grpc via PubSub client lib + // Keeping it version 1.15.1 to be consistent with netty via Firebase lib + grpcVersion = "1.15.1" + jaxbVersion = "2.3.0" + javaxActivationVersion = "1.1.1" } } diff --git a/client-api/build.gradle b/client-api/build.gradle index 782439b07..08217a648 100644 --- a/client-api/build.gradle +++ b/client-api/build.gradle @@ -14,8 +14,8 @@ dependencies { implementation "com.google.guava:guava:$guavaVersion" implementation 'io.jsonwebtoken:jjwt:0.9.1' - implementation 'javax.xml.bind:jaxb-api:2.3.0' - implementation 'javax.activation:activation:1.1.1' + implementation "javax.xml.bind:jaxb-api:$jaxbVersion" + implementation "javax.activation:activation:$javaxActivationVersion" testImplementation "io.dropwizard:dropwizard-client:$dropwizardVersion" testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" diff --git a/dataflow-pipelines/build.gradle b/dataflow-pipelines/build.gradle index b315d2b2f..9f6985a36 100644 --- a/dataflow-pipelines/build.gradle +++ b/dataflow-pipelines/build.gradle @@ -1,7 +1,7 @@ plugins { id "org.jetbrains.kotlin.jvm" version "1.2.71" id "application" - id "com.github.johnrengelman.shadow" version "4.0.0" + id "com.github.johnrengelman.shadow" version "4.0.1" id "idea" } diff --git a/ext-auth-provider/build.gradle b/ext-auth-provider/build.gradle index 2917c7371..b90dfed3f 100644 --- a/ext-auth-provider/build.gradle +++ b/ext-auth-provider/build.gradle @@ -1,7 +1,7 @@ plugins { id "org.jetbrains.kotlin.jvm" version "1.2.71" id "application" - id "com.github.johnrengelman.shadow" version "4.0.0" + id "com.github.johnrengelman.shadow" version "4.0.1" } dependencies { @@ -10,8 +10,8 @@ dependencies { implementation 
"io.dropwizard:dropwizard-core:$dropwizardVersion" implementation 'io.jsonwebtoken:jjwt:0.9.1' - implementation 'javax.xml.bind:jaxb-api:2.3.0' - implementation 'javax.activation:activation:1.1.1' + implementation "javax.xml.bind:jaxb-api:$jaxbVersion" + implementation "javax.activation:activation:$javaxActivationVersion" testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" testImplementation "org.jetbrains.kotlin:kotlin-test:$kotlinVersion" diff --git a/ocsgw/build.gradle b/ocsgw/build.gradle index 0c92891f4..cd9de8a6e 100644 --- a/ocsgw/build.gradle +++ b/ocsgw/build.gradle @@ -11,13 +11,13 @@ dependencies { implementation project(':diameter-stack') implementation "com.google.cloud:google-cloud-core-grpc:$googleCloudVersion" - implementation 'javax.xml.bind:jaxb-api:2.3.0' - implementation 'javax.activation:activation:1.1.1' + implementation "javax.xml.bind:jaxb-api:$jaxbVersion" + implementation "javax.activation:activation:$javaxActivationVersion" implementation 'ch.qos.logback:logback-classic:1.2.3' // log to gcp stack-driver - implementation 'com.google.cloud:google-cloud-logging-logback:0.64.0-alpha' + implementation 'com.google.cloud:google-cloud-logging-logback:0.66.0-alpha' testImplementation project(':diameter-test') testImplementation "org.junit.jupiter:junit-jupiter-api:$junit5Version" diff --git a/prime/build.gradle b/prime/build.gradle index f51356c25..1a602b479 100644 --- a/prime/build.gradle +++ b/prime/build.gradle @@ -1,7 +1,7 @@ plugins { id "org.jetbrains.kotlin.jvm" version "1.2.71" id "application" - id "com.github.johnrengelman.shadow" version "4.0.0" + id "com.github.johnrengelman.shadow" version "4.0.1" id "idea" } diff --git a/slack/build.gradle b/slack/build.gradle index acb215be8..6a80228f2 100644 --- a/slack/build.gradle +++ b/slack/build.gradle @@ -8,8 +8,8 @@ dependencies { implementation "io.dropwizard:dropwizard-client:$dropwizardVersion" - testImplementation 'javax.xml.bind:jaxb-api:2.3.0' - testImplementation 
'javax.activation:activation:1.1.1' + testImplementation "javax.xml.bind:jaxb-api:$jaxbVersion" + testImplementation "javax.activation:activation:$javaxActivationVersion" testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" diff --git a/tools/neo4j-admin-tools/build.gradle b/tools/neo4j-admin-tools/build.gradle index c22bf1e04..130c3e0af 100644 --- a/tools/neo4j-admin-tools/build.gradle +++ b/tools/neo4j-admin-tools/build.gradle @@ -1,7 +1,7 @@ plugins { id "org.jetbrains.kotlin.jvm" version "1.2.71" id "application" - id "com.github.johnrengelman.shadow" version "4.0.0" + id "com.github.johnrengelman.shadow" version "4.0.1" id "idea" } From 3a3a11d4e355b1b67153fb9162a3462c9d6f0893 Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Mon, 8 Oct 2018 17:34:55 +0200 Subject: [PATCH 71/93] Added product to Singapore --- .../src/main/resources/init.cypher | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/tools/neo4j-admin-tools/src/main/resources/init.cypher b/tools/neo4j-admin-tools/src/main/resources/init.cypher index d66abcab9..0dace3e4c 100644 --- a/tools/neo4j-admin-tools/src/main/resources/init.cypher +++ b/tools/neo4j-admin-tools/src/main/resources/init.cypher @@ -76,6 +76,17 @@ CREATE (:Product {`id`: '1GB_1SGD', `properties/noOfBytes`: '1_000_000_000', `sku`: '1GB_1SGD'}); +CREATE (:Product {`id`: '3GB_1.5SGD', + `presentation/isDefault`: 'true', + `presentation/isOffer`: 'true', + `presentation/offerLabel`: 'Default Offer', + `presentation/priceLabel`: '1.5 SGD', + `presentation/productLabel`: '+3GB', + `price/amount`: '150', + `price/currency`: 'SGD', + `properties/noOfBytes`: '3_000_000_000', + `sku`: '3GB_1.5SGD'}); + CREATE (:Segment {`id`: 'country-sg'}); CREATE (:Offer {`id`: 'default_offer-sg'}); @@ -85,6 +96,11 @@ WITH n MATCH (m:Product {id: '1GB_1SGD'}) CREATE (n)-[:OFFER_HAS_PRODUCT]->(m); +MATCH (n:Offer {id: 'default_offer-sg'}) +WITH n +MATCH (m:Product {id: '3GB_1.5SGD'}) +CREATE (n)-[:OFFER_HAS_PRODUCT]->(m); 
+ MATCH (n:Offer {id: 'default_offer-sg'}) WITH n MATCH (m:Segment {id: 'country-sg'}) From 8795fb5ae657de3a8d1f7f4c24dc5af8b28b18bb Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Sun, 7 Oct 2018 20:59:52 +0200 Subject: [PATCH 72/93] Added multipe operations for segment import --- .../{api/ImporterResource.kt => Jersey.kt} | 28 +---- .../ostelco/prime/admin/api/AdminModule.kt | 1 + .../ostelco/prime/admin/api/ImportResource.kt | 118 ++++++++++++++++++ .../prime/admin/importer/ImportProcessor.kt | 48 ++++++- .../org/ostelco/prime/admin/importer/Model.kt | 88 +++++++------ .../ostelco/importer/ImporterResourceTest.kt | 78 +++++++----- .../src/test/resources/sample-offer-only.yaml | 14 +-- .../sample-offer-products-segments.yaml | 50 ++++---- .../org/ostelco/prime/model/Entities.kt | 5 + .../ostelco/prime/storage/graph/Neo4jStore.kt | 58 +++++++-- .../org/ostelco/prime/storage/graph/Schema.kt | 17 +++ .../prime/storage/graph/GraphStoreTest.kt | 6 +- .../org/ostelco/prime/storage/Variants.kt | 10 +- 13 files changed, 371 insertions(+), 150 deletions(-) rename admin-api/src/main/kotlin/org/ostelco/prime/admin/{api/ImporterResource.kt => Jersey.kt} (71%) create mode 100644 admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImportResource.kt diff --git a/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImporterResource.kt b/admin-api/src/main/kotlin/org/ostelco/prime/admin/Jersey.kt similarity index 71% rename from admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImporterResource.kt rename to admin-api/src/main/kotlin/org/ostelco/prime/admin/Jersey.kt index 4f89b1597..8d829b154 100644 --- a/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImporterResource.kt +++ b/admin-api/src/main/kotlin/org/ostelco/prime/admin/Jersey.kt @@ -1,46 +1,20 @@ -package org.ostelco.prime.admin.api +package org.ostelco.prime.admin import com.fasterxml.jackson.core.JsonProcessingException import com.fasterxml.jackson.databind.ObjectMapper import 
com.fasterxml.jackson.dataformat.yaml.YAMLFactory import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import com.fasterxml.jackson.module.kotlin.registerKotlinModule -import org.ostelco.prime.admin.importer.ImportDeclaration -import org.ostelco.prime.admin.importer.ImportProcessor import org.ostelco.prime.getLogger import java.io.InputStream import java.lang.reflect.Type import javax.ws.rs.Consumes -import javax.ws.rs.POST -import javax.ws.rs.Path import javax.ws.rs.WebApplicationException import javax.ws.rs.core.MediaType import javax.ws.rs.core.MultivaluedMap -import javax.ws.rs.core.Response import javax.ws.rs.core.Response.Status.BAD_REQUEST import javax.ws.rs.ext.MessageBodyReader - -/** - * Resource used to handle the importer related REST calls. - */ -@Path("/importer") -class ImporterResource(val processor: ImportProcessor) { - - private val logger by getLogger() - - @POST - @Consumes("text/vnd.yaml") - fun postStatus(declaration: ImportDeclaration): Response { - logger.info("POST status for importer") - - return processor.import(declaration).fold( - { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, - { Response.status(Response.Status.CREATED) } - ).build() - } -} - /// XXX This is a very generic message body reader, should // be available anywhere we read yaml files. 
@Consumes("text/vnd.yaml") diff --git a/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/AdminModule.kt b/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/AdminModule.kt index ef913f837..8e5983f99 100644 --- a/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/AdminModule.kt +++ b/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/AdminModule.kt @@ -2,6 +2,7 @@ package org.ostelco.prime.admin.api import com.fasterxml.jackson.annotation.JsonTypeName import io.dropwizard.setup.Environment +import org.ostelco.prime.admin.YamlMessageBodyReader import org.ostelco.prime.admin.importer.ImportAdapter import org.ostelco.prime.module.PrimeModule diff --git a/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImportResource.kt b/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImportResource.kt new file mode 100644 index 000000000..607fd44c5 --- /dev/null +++ b/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImportResource.kt @@ -0,0 +1,118 @@ +package org.ostelco.prime.admin.api + +import org.ostelco.prime.admin.asJson +import org.ostelco.prime.admin.importer.AddToSegments +import org.ostelco.prime.admin.importer.ChangeSegments +import org.ostelco.prime.admin.importer.CreateOffer +import org.ostelco.prime.admin.importer.CreateSegments +import org.ostelco.prime.admin.importer.ImportProcessor +import org.ostelco.prime.admin.importer.RemoveFromSegments +import org.ostelco.prime.admin.importer.UpdateSegments +import org.ostelco.prime.getLogger +import javax.ws.rs.Consumes +import javax.ws.rs.DELETE +import javax.ws.rs.POST +import javax.ws.rs.PUT +import javax.ws.rs.Path +import javax.ws.rs.core.Response + + +/** + * Resource used to handle the import related REST calls. + */ +@Path("/import") +class ImporterResource(private val processor: ImportProcessor) { + + private val logger by getLogger() + + /** + * Create new [Offer]. + * Link to new or existing [Product]. + * Link to new or existing [Segment]. 
+ */ + @POST + @Path("/offer") + @Consumes("text/vnd.yaml") + fun createOffer(createOffer: CreateOffer): Response { + logger.info("POST for /import/offer") + return processor.createOffer(createOffer).fold( + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, + { Response.status(Response.Status.CREATED) } + ).build() + } + + /** + * Create new [Segment]. + */ + @POST + @Path("/segments") + @Consumes("text/vnd.yaml") + fun createSegment(createSegments: CreateSegments): Response { + logger.info("POST for /import/segments") + + return processor.createSegments(createSegments).fold( + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, + { Response.status(Response.Status.CREATED) } + ).build() + } + + /** + * Update [Segment] - Replace all [Subscriber]s under this [Segment]. + */ + @PUT + @Path("/segments") + @Consumes("text/vnd.yaml") + fun importSegment(updateSegments: UpdateSegments): Response { + logger.info("PUT for /import/segments") + + return processor.updateSegments(updateSegments).fold( + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, + { Response.status(Response.Status.OK) } + ).build() + } + + /** + * Add [Subscriber]s to [Segment] + */ + @POST + @Path("/segments/subscribers") + @Consumes("text/vnd.yaml") + fun importSegment(addToSegments: AddToSegments): Response { + logger.info("POST for /import/segments/subscribers") + + return processor.addToSegments(addToSegments).fold( + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, + { Response.status(Response.Status.OK) } + ).build() + } + + /** + * Remove [Subscriber]s from [Segment] + */ + @DELETE + @Path("/segments/subscribers") + @Consumes("text/vnd.yaml") + fun importSegment(removeFromSegments: RemoveFromSegments): Response { + logger.info("DELETE for /import/segments/subscribers") + + return processor.removeFromSegments(removeFromSegments).fold( + { apiError -> 
Response.status(apiError.status).entity(asJson(apiError)) }, + { Response.status(Response.Status.OK) } + ).build() + } + + /** + * Move [Subscriber]s from one [Segment] to another. + */ + @PUT + @Path("/segments/subscribers") + @Consumes("text/vnd.yaml") + fun importSegment(changeSegments: ChangeSegments): Response { + logger.info("PUT for /import/segments/subscribers") + + return processor.changeSegments(changeSegments).fold( + { apiError -> Response.status(apiError.status).entity(asJson(apiError)) }, + { Response.status(Response.Status.OK) } + ).build() + } +} \ No newline at end of file diff --git a/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/ImportProcessor.kt b/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/ImportProcessor.kt index 3f1c653ee..ade525cf2 100644 --- a/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/ImportProcessor.kt +++ b/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/ImportProcessor.kt @@ -4,23 +4,59 @@ import arrow.core.Either import org.ostelco.prime.apierror.ApiError import org.ostelco.prime.apierror.ApiErrorCode import org.ostelco.prime.apierror.BadRequestError +import org.ostelco.prime.model.Offer +import org.ostelco.prime.model.Segment import org.ostelco.prime.module.getResource import org.ostelco.prime.storage.AdminDataSource interface ImportProcessor { - fun import(importDeclaration: ImportDeclaration): Either + fun createOffer(createOffer: CreateOffer): Either + fun createSegments(createSegments: CreateSegments): Either + fun updateSegments(updateSegments: UpdateSegments): Either + fun addToSegments(addToSegments: AddToSegments): Either + fun removeFromSegments(removeFromSegments: RemoveFromSegments): Either + fun changeSegments(changeSegments: ChangeSegments): Either } class ImportAdapter : ImportProcessor { private val adminDataStore by lazy { getResource() } - override fun import(importDeclaration: ImportDeclaration): Either { + override fun createOffer(createOffer: CreateOffer): 
Either { + return adminDataStore.atomicCreateOffer( + offer = createOffer.createOffer.let { + Offer(id = it.id, segments = it.existingSegments, products = it.existingProducts) + }, + products = createOffer.createOffer.createProducts, + segments = createOffer.createOffer.createSegments) + .mapLeft { BadRequestError(it.message, ApiErrorCode.FAILED_TO_IMPORT_OFFER) } + } + + override fun createSegments(createSegments: CreateSegments): Either { + return adminDataStore.atomicCreateSegments(createSegments = createSegments.createSegments) + .mapLeft { BadRequestError(it.message, ApiErrorCode.FAILED_TO_IMPORT_OFFER) } + } + + override fun updateSegments(updateSegments: UpdateSegments): Either { + return adminDataStore.atomicUpdateSegments( + updateSegments = updateSegments.updateSegments.map { Segment(id = it.id, subscribers = it.subscribers) } + ).mapLeft { BadRequestError(it.message, ApiErrorCode.FAILED_TO_IMPORT_OFFER) } + } + + override fun addToSegments(addToSegments: AddToSegments): Either { + return adminDataStore.atomicAddToSegments( + addToSegments = addToSegments.addToSegments.map { Segment(id = it.id, subscribers = it.subscribers) } + ).mapLeft { BadRequestError(it.message, ApiErrorCode.FAILED_TO_IMPORT_OFFER) } + } + + override fun removeFromSegments(removeFromSegments: RemoveFromSegments): Either { + return adminDataStore.atomicRemoveFromSegments( + removeFromSegments = removeFromSegments.removeFromSegments.map { Segment(id = it.id, subscribers = it.subscribers) } + ).mapLeft { BadRequestError(it.message, ApiErrorCode.FAILED_TO_IMPORT_OFFER) } + } - return adminDataStore.atomicImport( - offer = importDeclaration.offer, - products = importDeclaration.products, - segments = importDeclaration.segments) + override fun changeSegments(changeSegments: ChangeSegments): Either { + return adminDataStore.atomicChangeSegments(changeSegments = changeSegments.changeSegments) .mapLeft { BadRequestError(it.message, ApiErrorCode.FAILED_TO_IMPORT_OFFER) } } } \ No newline at 
end of file diff --git a/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/Model.kt b/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/Model.kt index e0be4c00f..f609eb914 100644 --- a/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/Model.kt +++ b/admin-api/src/main/kotlin/org/ostelco/prime/admin/importer/Model.kt @@ -1,68 +1,78 @@ package org.ostelco.prime.admin.importer -import org.ostelco.prime.model.Offer +import org.ostelco.prime.model.ChangeSegment import org.ostelco.prime.model.Product import org.ostelco.prime.model.Segment /** * The input classes being parsed (as yaml). */ +data class CreateOffer(val createOffer: Offer) -data class ProducingAgent(val name: String, val version: String) +data class Offer( + val id:String, + val createProducts: Collection = emptyList(), + val existingProducts: Collection = emptyList(), + val createSegments: Collection = emptyList(), + val existingSegments: Collection = emptyList()) + +data class CreateSegments(val createSegments: Collection) +data class UpdateSegments(val updateSegments: Collection) +data class AddToSegments(val addToSegments: Collection) +data class RemoveFromSegments(val removeFromSegments: Collection) +data class ChangeSegments(val changeSegments: Collection) -class ImportDeclaration( - val producingAgent: ProducingAgent, - val offer: Offer, - val segments: Collection = emptyList(), - val products: Collection = emptyList()) +data class NonEmptySegment( + val id: String, + val subscribers: Collection) /* -class TimeInterval(var from: String?= null, var to: String? = null) +data class ProducingAgent(val name: String, val version: String) -class Presentation( - var badgeLabel: String? = null, - var description: String? = null, - var shortDescription: String? = null, - var label: String? = null, - var name: String? = null, - var priceLabel: String? = null, - var hidden: Boolean? = null, - var imageUrl: String? 
= null -) +data class TimeInterval(val from: String, val to: String) -class OfferFinancials( - var repurchability: String? = null, - var currencyLabel: String? = null, - var price: Int? = null, - var taxRate: BigDecimal? = null +data class Presentation( + val badgeLabel: String, + val description: String, + val shortDescription: String, + val label: String, + val name: String, + val priceLabel: String, + val hidden: Boolean, + val imageUrl: String ) -class SubscriberIdCollection( - var decryptionKey: String? = null, - var members : MutableList? = null +data class OfferFinancials( + val repurchability: String, + val currencyLabel: String, + val price: Int, + val taxRate: BigDecimal ) +data class SubscriberIdCollection( + val decryptionKey: String, + val members : MutableList +) -class Segment( - var type: String? = null, - var description: String? = null, - var members: SubscriberIdCollection? = null +data class Segment( + val type: String, + val description: String, + val members: SubscriberIdCollection ) // XXX Should perhaps, apart from SKU, be a // a keyword/value map, to be interpreted by // something, somewhere that knows something about // technical product parameters? -class Product( - var sku: String? = null, - var noOfBytes: BigInteger? = null +data class Product( + val sku: String, + val noOfBytes: BigInteger ) - -class Offer( - var visibility: TimeInterval? = null, - var presentation: Presentation? = null, - var financial: OfferFinancials? = null, - var product: Product? 
= null +data class Offer( + val visibility: TimeInterval, + val presentation: Presentation, + val financial: OfferFinancials, + val product: Product ) */ \ No newline at end of file diff --git a/admin-api/src/test/kotlin/org/ostelco/importer/ImporterResourceTest.kt b/admin-api/src/test/kotlin/org/ostelco/importer/ImporterResourceTest.kt index 0b6473fe2..d3940b79c 100644 --- a/admin-api/src/test/kotlin/org/ostelco/importer/ImporterResourceTest.kt +++ b/admin-api/src/test/kotlin/org/ostelco/importer/ImporterResourceTest.kt @@ -6,10 +6,16 @@ import io.dropwizard.testing.junit.ResourceTestRule import org.junit.Assert.assertEquals import org.junit.ClassRule import org.junit.Test +import org.ostelco.prime.admin.YamlMessageBodyReader import org.ostelco.prime.admin.api.ImporterResource -import org.ostelco.prime.admin.api.YamlMessageBodyReader -import org.ostelco.prime.admin.importer.ImportDeclaration +import org.ostelco.prime.admin.importer.AddToSegments +import org.ostelco.prime.admin.importer.ChangeSegments +import org.ostelco.prime.admin.importer.CreateOffer +import org.ostelco.prime.admin.importer.CreateSegments import org.ostelco.prime.admin.importer.ImportProcessor +import org.ostelco.prime.admin.importer.Offer +import org.ostelco.prime.admin.importer.RemoveFromSegments +import org.ostelco.prime.admin.importer.UpdateSegments import org.ostelco.prime.apierror.ApiError import org.ostelco.prime.model.Price import javax.ws.rs.client.Entity @@ -23,11 +29,31 @@ class ImporterResourceTest { companion object { - lateinit var importedResource: ImportDeclaration + lateinit var offer: Offer private val processor: ImportProcessor = object : ImportProcessor { - override fun import(importDeclaration: ImportDeclaration): Either { - importedResource = importDeclaration + override fun createOffer(createOffer: CreateOffer): Either { + Companion.offer = createOffer.createOffer + return Either.right(Unit) + } + + override fun createSegments(createSegments: CreateSegments): Either { + 
return Either.right(Unit) + } + + override fun updateSegments(updateSegments: UpdateSegments): Either { + return Either.right(Unit) + } + + override fun addToSegments(addToSegments: AddToSegments): Either { + return Either.right(Unit) + } + + override fun removeFromSegments(removeFromSegments: RemoveFromSegments): Either { + return Either.right(Unit) + } + + override fun changeSegments(changeSegments: ChangeSegments): Either { return Either.right(Unit) } } @@ -46,35 +72,33 @@ class ImporterResourceTest { val text: String = fixture("sample-offer-products-segments.yaml") val response = resources - ?.target("/importer") + ?.target("/import/offer") ?.request("text/vnd.yaml") ?.post(Entity.entity(text, "text/vnd.yaml")) assertEquals(response?.readEntity(String::class.java), Status.CREATED.statusCode, response?.status) - assertEquals("Simple agent", importedResource.producingAgent.name) - assertEquals("1.0", importedResource.producingAgent.version) // check offer - assertEquals("test-offer", importedResource.offer.id) - assertEquals(emptyList(), importedResource.offer.products) - assertEquals(emptyList(), importedResource.offer.segments) + assertEquals("test-offer", offer.id) + assertEquals(listOf("1GB_249NOK"), offer.existingProducts) + assertEquals(listOf("test-segment"), offer.existingSegments) // check product - assertEquals(1, importedResource.products.size) - val product = importedResource.products.first() - assertEquals("1GB_249NOK", product.sku) - assertEquals(Price(249, "NOK"), product.price) - assertEquals(mapOf("noOfBytes" to "1_000_000_000"), product.properties) + assertEquals(1, offer.createProducts.size) + val product = offer.createProducts.first() + assertEquals("10GB_449NOK", product.sku) + assertEquals(Price(449, "NOK"), product.price) + assertEquals(mapOf("noOfBytes" to "10_000_000_000"), product.properties) assertEquals( mapOf("isDefault" to "true", "offerLabel" to "Default Offer", - "priceLabel" to "249 NOK"), + "priceLabel" to "449 NOK"), 
product.presentation) // check segment - assertEquals(1, importedResource.segments.size) - val segment = importedResource.segments.first() - assertEquals("test-segment", segment.id) + assertEquals(1, offer.createSegments.size) + val segment = offer.createSegments.first() + assertEquals("test-new-segment", segment.id) assertEquals(emptyList(), segment.subscribers) } @@ -84,24 +108,22 @@ class ImporterResourceTest { val text: String = fixture("sample-offer-only.yaml") val response = resources - ?.target("/importer") + ?.target("/import/offer") ?.request("text/vnd.yaml") ?.post(Entity.entity(text, "text/vnd.yaml")) assertEquals(response?.readEntity(String::class.java), Status.CREATED.statusCode, response?.status) - assertEquals("Simple agent", importedResource.producingAgent.name) - assertEquals("1.0", importedResource.producingAgent.version) // check offer - assertEquals("test-offer", importedResource.offer.id) - assertEquals(listOf("1GB_249NOK"), importedResource.offer.products) - assertEquals(listOf("test-segment"), importedResource.offer.segments) + assertEquals("test-offer", offer.id) + assertEquals(listOf("1GB_249NOK"), offer.existingProducts) + assertEquals(listOf("test-segment"), offer.existingSegments) // check product - assertEquals(0, importedResource.products.size) + assertEquals(0, offer.createProducts.size) // check segment - assertEquals(0, importedResource.segments.size) + assertEquals(0, offer.createSegments.size) } /** diff --git a/admin-api/src/test/resources/sample-offer-only.yaml b/admin-api/src/test/resources/sample-offer-only.yaml index 5e11702f8..dee163331 100644 --- a/admin-api/src/test/resources/sample-offer-only.yaml +++ b/admin-api/src/test/resources/sample-offer-only.yaml @@ -1,12 +1,6 @@ -producingAgent: - name: Simple agent - version: 1.0 - -offer: +createOffer: id: test-offer - # use existing product - products: + existingProducts: - 1GB_249NOK - # use existing segment - segments: - - test-segment + existingSegments: + - test-segment \ 
No newline at end of file diff --git a/admin-api/src/test/resources/sample-offer-products-segments.yaml b/admin-api/src/test/resources/sample-offer-products-segments.yaml index b1a6e2809..c0ce6b8f9 100644 --- a/admin-api/src/test/resources/sample-offer-products-segments.yaml +++ b/admin-api/src/test/resources/sample-offer-products-segments.yaml @@ -1,35 +1,29 @@ -producingAgent: - name: Simple agent - version: 1.0 - -offer: +createOffer: id: test-offer - # list of existing products - # listing products to be created in this yaml is OPTIONAL - -# products: -# - 1GB_249NOK - - # list of existing segments - # listing segments to be created in this yaml is OPTIONAL +# list of existing products +# listing products to be created in this yaml is OPTIONAL + existingProducts: + - 1GB_249NOK -# segments: -# - test-segment +# list of existing segments +# listing segments to be created in this yaml is OPTIONAL + existingSegments: + - test-segment # These products will be created and linked to offer - 'test-offer' -products: - - sku: 1GB_249NOK - price: - amount: 249 - currency: NOK - properties: - noOfBytes: 1_000_000_000 - presentation: - isDefault: true - offerLabel: Default Offer - priceLabel: 249 NOK + createProducts: + - sku: 10GB_449NOK + price: + amount: 449 + currency: NOK + properties: + noOfBytes: 10_000_000_000 + presentation: + isDefault: true + offerLabel: Default Offer + priceLabel: 449 NOK # These segments will be created and linked to offer - 'test-offer' -segments: - - id: test-segment + createSegments: + - id: test-new-segment diff --git a/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt b/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt index 60c64ad04..6e68893fa 100644 --- a/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt +++ b/model/src/main/kotlin/org/ostelco/prime/model/Entities.kt @@ -16,6 +16,11 @@ data class Segment( override val id: String, val subscribers: Collection = emptyList()) : HasId +data class ChangeSegment( + val 
sourceSegmentId: String, + val targetSegmentId: String, + val subscribers: Collection) + data class Subscriber( val email: String, val name: String = "", diff --git a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt index 3b2b1101f..fa99b8217 100644 --- a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt +++ b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Neo4jStore.kt @@ -7,6 +7,7 @@ import org.neo4j.driver.v1.Transaction import org.ostelco.prime.analytics.AnalyticsService import org.ostelco.prime.getLogger import org.ostelco.prime.model.Bundle +import org.ostelco.prime.model.ChangeSegment import org.ostelco.prime.model.Offer import org.ostelco.prime.model.Product import org.ostelco.prime.model.ProductClass @@ -665,10 +666,15 @@ object Neo4jStoreSingleton : GraphStore { } override fun updateSegment(segment: Segment): Either = writeTransaction { - subscriberToSegmentStore.create(segment.id, segment.subscribers, transaction) + updateSegment(segment, transaction) .ifFailedThenRollback(transaction) } + private fun updateSegment(segment: Segment, transaction: Transaction): Either { + return subscriberToSegmentStore.removeAll(toId = segment.id, transaction = transaction) + .flatMap { subscriberToSegmentStore.create(segment.subscribers, segment.id, transaction) } + } + // // Offer // @@ -685,9 +691,13 @@ object Neo4jStoreSingleton : GraphStore { } // - // Atomic Import of Offer + Product + Segment + // Atomic Imports // - override fun atomicImport( + + /** + * Create of Offer + Product + Segment + */ + override fun atomicCreateOffer( offer: Offer, segments: Collection, products: Collection): Either = writeTransaction { @@ -735,13 +745,45 @@ object Neo4jStoreSingleton : GraphStore { .ifFailedThenRollback(transaction) } -// override fun getOffers(): Collection = offerStore.getAll().values.map { Offer().apply { id = it.id } } + /** + * 
Create Segments + */ + override fun atomicCreateSegments(createSegments: Collection): Either = writeTransaction { + + createSegments.fold( + initial = Either.right(Unit) as Either, + operation = { acc, segment -> + acc.flatMap { createSegment(segment, transaction) } + }) + .ifFailedThenRollback(transaction) + } + + /** + * Update segments + */ + override fun atomicUpdateSegments(updateSegments: Collection): Either = writeTransaction { + + updateSegments.fold( + initial = Either.right(Unit) as Either, + operation = { acc, segment -> + acc.flatMap { updateSegment(segment, transaction) } + }) + .ifFailedThenRollback(transaction) + } + + override fun atomicAddToSegments(addToSegments: Collection): Either { TODO() } + + override fun atomicRemoveFromSegments(removeFromSegments: Collection): Either { TODO() } + + override fun atomicChangeSegments(changeSegments: Collection): Either { TODO() } + + // override fun getOffers(): Collection = offerStore.getAll().values.map { Offer().apply { id = it.id } } -// override fun getSegments(): Collection = segmentStore.getAll().values.map { Segment().apply { id = it.id } } + // override fun getSegments(): Collection = segmentStore.getAll().values.map { Segment().apply { id = it.id } } -// override fun getOffer(id: String): Offer? = offerStore.get(id)?.let { Offer().apply { this.id = it.id } } + // override fun getOffer(id: String): Offer? = offerStore.get(id)?.let { Offer().apply { this.id = it.id } } -// override fun getSegment(id: String): Segment? = segmentStore.get(id)?.let { Segment().apply { this.id = it.id } } + // override fun getSegment(id: String): Segment? = segmentStore.get(id)?.let { Segment().apply { this.id = it.id } } -// override fun getProductClass(id: String): ProductClass? = productClassStore.get(id) + // override fun getProductClass(id: String): ProductClass? 
= productClassStore.get(id) } \ No newline at end of file diff --git a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Schema.kt b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Schema.kt index 63f0366a4..32ee8c881 100644 --- a/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Schema.kt +++ b/neo4j-store/src/main/kotlin/org/ostelco/prime/storage/graph/Schema.kt @@ -28,6 +28,8 @@ data class EntityType( private val dataClass: Class, val name: String = dataClass.simpleName) { + var entityStore: EntityStore? = null + fun createEntity(map: Map): ENTITY = ObjectHandler.getObject(map, dataClass) } @@ -44,6 +46,10 @@ data class RelationType( class EntityStore(private val entityType: EntityType) { + init { + entityType.entityStore = this + } + fun get(id: String, transaction: Transaction): Either { return read("""MATCH (node:${entityType.name} {id: '$id'}) RETURN node;""", transaction) { if (it.hasNext()) @@ -249,6 +255,7 @@ class RelationStore(private val relationType: Relation CREATE (from)-[:${relationType.relation.name}]->(to); """.trimIndent(), transaction) { + // TODO vihang: validate if 'from' and 'to' node exists val actualCount = it.summary().counters().relationshipsCreated() Either.cond( test = actualCount == toIds.size, @@ -269,6 +276,7 @@ class RelationStore(private val relationType: Relation CREATE (from)-[:${relationType.relation.name}]->(to); """.trimIndent(), transaction) { + // TODO vihang: validate if 'from' and 'to' node exists val actualCount = it.summary().counters().relationshipsCreated() Either.cond( test = actualCount == fromIds.size, @@ -280,6 +288,15 @@ class RelationStore(private val relationType: Relation actualCount = actualCount) }) } + + fun removeAll(toId: String, transaction: Transaction): Either = write(""" + MATCH (from:${relationType.from.name})-[r:${relationType.relation.name}]->(to:${relationType.to.name} { id: '$toId' }) + DELETE r; + """.trimIndent(), + transaction) { + // TODO vihang: validate if 
'to' node exists + Either.right(Unit) + } } // diff --git a/neo4j-store/src/test/kotlin/org/ostelco/prime/storage/graph/GraphStoreTest.kt b/neo4j-store/src/test/kotlin/org/ostelco/prime/storage/graph/GraphStoreTest.kt index f0ca74eb5..afb070bac 100644 --- a/neo4j-store/src/test/kotlin/org/ostelco/prime/storage/graph/GraphStoreTest.kt +++ b/neo4j-store/src/test/kotlin/org/ostelco/prime/storage/graph/GraphStoreTest.kt @@ -176,7 +176,7 @@ class GraphStoreTest { val offer = Offer(id = "some_offer", products = listOf("1GB_249NOK", "2GB_299NOK")) - Neo4jStoreSingleton.atomicImport(offer = offer, products = products, segments = segments) + Neo4jStoreSingleton.atomicCreateOffer(offer = offer, products = products, segments = segments) .mapLeft { fail(it.message) } } @@ -199,7 +199,7 @@ class GraphStoreTest { val offer = Offer(id = "some_offer", products = listOf("1GB_249NOK", "2GB_299NOK")) - Neo4jStoreSingleton.atomicImport(offer = offer, products = products, segments = segments) + Neo4jStoreSingleton.atomicCreateOffer(offer = offer, products = products, segments = segments) .mapLeft { fail(it.message) } val duplicateOffer = Offer( @@ -207,7 +207,7 @@ class GraphStoreTest { products = (products.map { it.sku } + offer.products).toSet(), segments = segments.map { it.id }) - Neo4jStoreSingleton.atomicImport(offer = duplicateOffer).bimap( + Neo4jStoreSingleton.atomicCreateOffer(offer = duplicateOffer).bimap( { assertEquals("Offer - some_offer already exists.", it.message) }, { fail("Expected import to fail since offer already exists.") }) } diff --git a/prime-modules/src/main/kotlin/org/ostelco/prime/storage/Variants.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/storage/Variants.kt index aa157e587..c8613aa33 100644 --- a/prime-modules/src/main/kotlin/org/ostelco/prime/storage/Variants.kt +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/storage/Variants.kt @@ -3,6 +3,7 @@ package org.ostelco.prime.storage import arrow.core.Either import 
org.ostelco.prime.model.ApplicationToken import org.ostelco.prime.model.Bundle +import org.ostelco.prime.model.ChangeSegment import org.ostelco.prime.model.Offer import org.ostelco.prime.model.Product import org.ostelco.prime.model.ProductClass @@ -149,11 +150,18 @@ interface AdminGraphStore { fun getPaidSubscriberCount(): Long // atomic import of Offer + Product + Segment - fun atomicImport( + fun atomicCreateOffer( offer: Offer, segments: Collection = emptyList(), products: Collection = emptyList()) : Either + fun atomicCreateSegments(createSegments: Collection): Either + + fun atomicUpdateSegments(updateSegments: Collection): Either + fun atomicAddToSegments(addToSegments: Collection): Either + fun atomicRemoveFromSegments(removeFromSegments: Collection): Either + fun atomicChangeSegments(changeSegments: Collection): Either + // simple getAll // fun getOffers(): Collection // fun getSegments(): Collection From 8acef7d80eae47db7b5371ea84bf5a5bfc29c7ac Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Mon, 8 Oct 2018 17:36:47 +0200 Subject: [PATCH 73/93] Fix the logs --- .../org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt index b78b71314..ff986641f 100644 --- a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt +++ b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt @@ -166,7 +166,6 @@ abstract class MetricBuilder( val sql: String, val resultColumn: String, val env: EnvironmentVars) { - private val log: Logger = LoggerFactory.getLogger(MetricBuilder::class.java) /** * Function which will add the current value of the metric to registry. 
@@ -209,8 +208,7 @@ abstract class MetricBuilder( val jobId: JobId = JobId.of(UUID.randomUUID().toString()); var queryJob: BQJob = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build()); - log.info("Waiting for $metricName Query") - // Wait for the query to complete. + // Wait for the query to complete. // Retry maximum 4 times for up to 2 minutes. queryJob = async { queryJob.waitFor( @@ -220,7 +218,6 @@ abstract class MetricBuilder( RetryOption.maxAttempts(5), RetryOption.totalTimeout(Duration.ofMinutes(2))); }.await() - log.info("Finished waiting for $metricName Query") // Check for errors if (queryJob == null) { @@ -259,6 +256,7 @@ class SummaryMetricBuilder( .name(metricName) .help(help).register(registry) val value: Long = getNumberValueViaSql() + log.info("Summarizing metric $metricName to be $value") summary.observe(value * 1.0) } catch (e: NullPointerException) { log.error(e.toString()) @@ -284,6 +282,7 @@ class GaugeMetricBuilder( .name(metricName) .help(help).register(registry) val value: Long = getNumberValueViaSql() + log.info("Gauge metric $metricName = $value") gauge.set(value * 1.0) } catch (e: NullPointerException) { log.error(e.toString()) From 7c9eb47a09a43d769c18176621d96a8e7c4c298e Mon Sep 17 00:00:00 2001 From: Prasanth Ullattil Date: Mon, 8 Oct 2018 17:59:59 +0200 Subject: [PATCH 74/93] Change logs --- .../org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt index ff986641f..a0e6b0874 100644 --- a/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt +++ b/bq-metrics-extractor/src/main/kotlin/org/ostelco/bqmetrics/BqMetricsExtractorApplication.kt @@ -338,10 +338,9 @@ private class PrometheusPusher(val pushGateway: String, val jobName: 
String) { } } - log.info("Querying bigquery for metric values") + log.info("Querying BQ for total ${metricSources.size} metric values") val start = System.currentTimeMillis() val pg = PushGateway(pushGateway) - log.info("Starting ${metricSources.size} Queries") coroutineScope { metricSources.forEach { builder -> launch { From 1ff77c69b5ba103e7b5957ab212ae1f0a8d8839a Mon Sep 17 00:00:00 2001 From: Martin Cederlof Date: Tue, 9 Oct 2018 10:37:46 +0200 Subject: [PATCH 75/93] Adding IMEI module to Prime --- .gitignore | 5 +- certs/.gitignore | 2 + imei-lookup/build.gradle | 16 ++++++ .../prime/imei/imeilookup/ImeiLookupModule.kt | 34 ++++++++++++ .../prime/imei/imeilookup/ImeiSqliteDb.kt | 28 ++++++++++ .../io.dropwizard.jackson.Discoverable | 1 + .../org.ostelco.prime.imei.ImeiLookup | 1 + .../org.ostelco.prime.module.PrimeModule | 1 + .../prime/imei/imeilookup/ImeiSqliteDbTest.kt | 55 +++++++++++++++++++ imei-lookup/src/test/resources/config.yaml | 11 ++++ payment-processor/build.gradle | 5 +- .../StripePaymentProcessorTest.kt | 2 - .../org/ostelco/prime/imei/ImeiLookup.kt | 9 +++ .../prime/imei/core/ImeiLookupError.kt | 9 +++ .../org/ostelco/prime/imei/core/Model.kt | 10 ++++ prime/build.gradle | 1 + settings.gradle | 3 + 17 files changed, 186 insertions(+), 7 deletions(-) create mode 100644 certs/.gitignore create mode 100644 imei-lookup/build.gradle create mode 100644 imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt create mode 100644 imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiSqliteDb.kt create mode 100644 imei-lookup/src/main/resources/META-INF/services/io.dropwizard.jackson.Discoverable create mode 100644 imei-lookup/src/main/resources/META-INF/services/org.ostelco.prime.imei.ImeiLookup create mode 100644 imei-lookup/src/main/resources/META-INF/services/org.ostelco.prime.module.PrimeModule create mode 100644 imei-lookup/src/test/kotlin/org/ostelco/prime/imei/imeilookup/ImeiSqliteDbTest.kt create mode 100644 
imei-lookup/src/test/resources/config.yaml create mode 100644 prime-modules/src/main/kotlin/org/ostelco/prime/imei/ImeiLookup.kt create mode 100644 prime-modules/src/main/kotlin/org/ostelco/prime/imei/core/ImeiLookupError.kt create mode 100644 prime-modules/src/main/kotlin/org/ostelco/prime/imei/core/Model.kt diff --git a/.gitignore b/.gitignore index 1902c8e88..2278a1e2f 100644 --- a/.gitignore +++ b/.gitignore @@ -37,4 +37,7 @@ secrets/* .swagger_gen_dir ocs_descriptor.pb -metrics_descriptor.pb \ No newline at end of file +metrics_descriptor.pb + +.Mac +*.DS_Store diff --git a/certs/.gitignore b/certs/.gitignore new file mode 100644 index 000000000..0d313d1e5 --- /dev/null +++ b/certs/.gitignore @@ -0,0 +1,2 @@ +*.key +*.crt \ No newline at end of file diff --git a/imei-lookup/build.gradle b/imei-lookup/build.gradle new file mode 100644 index 000000000..b578bdc39 --- /dev/null +++ b/imei-lookup/build.gradle @@ -0,0 +1,16 @@ +plugins { + id "org.jetbrains.kotlin.jvm" version "1.2.71" + id "java-library" + id "idea" +} + +dependencies { + implementation project(":prime-modules") + implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion" + implementation "io.dropwizard:dropwizard-jdbi3:$dropwizardVersion" + + testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" + + testImplementation "org.jetbrains.kotlin:kotlin-test:$kotlinVersion" + testImplementation "org.jetbrains.kotlin:kotlin-test-junit:$kotlinVersion" +} \ No newline at end of file diff --git a/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt b/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt new file mode 100644 index 000000000..14657a11a --- /dev/null +++ b/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt @@ -0,0 +1,34 @@ +package org.ostelco.prime.imei.ImeiDb + +import com.fasterxml.jackson.annotation.JsonProperty +import com.fasterxml.jackson.annotation.JsonTypeName +import 
io.dropwizard.setup.Environment +import org.ostelco.prime.getLogger +import org.ostelco.prime.module.PrimeModule + + +@JsonTypeName("Imei-lookup") +class ImeiLookupModule : PrimeModule { + + private val logger by getLogger() + + @JsonProperty + var config: Config? = null + + override fun init(env: Environment) { + + logger.info("ImeiLookupModule env: $env") + logger.info("CSV file set to ${config?.imeiLookupConfig?.csvFile}") + } +} + + +class Config { + @JsonProperty("sqlite") + lateinit var imeiLookupConfig: ImeiLookupConfig +} + +class ImeiLookupConfig { + @JsonProperty + var csvFile: String = "default.txt" +} \ No newline at end of file diff --git a/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiSqliteDb.kt b/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiSqliteDb.kt new file mode 100644 index 000000000..618e7f7a4 --- /dev/null +++ b/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiSqliteDb.kt @@ -0,0 +1,28 @@ +package org.ostelco.prime.imei.imeilookup + +import arrow.core.Either +import org.ostelco.prime.getLogger +import org.ostelco.prime.imei.ImeiLookup +import org.ostelco.prime.imei.core.Imei +import org.ostelco.prime.imei.core.ImeiLookupError +import org.ostelco.prime.imei.core.ImeaiNotFoundError + + +/** + * SQLite implementation of the IMEI lookup service + */ +class ImeiSqliteDb : ImeiLookup by jdbcSingleton { + + object jdbcSingleton : ImeiLookup { + + private val logger by getLogger() + + init { + logger.info("Singleton created") + } + + override fun getImeiInformation(imeisv: String): Either { + return Either.left(ImeaiNotFoundError("Not implemented jet")) + } + } +} diff --git a/imei-lookup/src/main/resources/META-INF/services/io.dropwizard.jackson.Discoverable b/imei-lookup/src/main/resources/META-INF/services/io.dropwizard.jackson.Discoverable new file mode 100644 index 000000000..8056fe23b --- /dev/null +++ b/imei-lookup/src/main/resources/META-INF/services/io.dropwizard.jackson.Discoverable 
@@ -0,0 +1 @@ +org.ostelco.prime.module.PrimeModule \ No newline at end of file diff --git a/imei-lookup/src/main/resources/META-INF/services/org.ostelco.prime.imei.ImeiLookup b/imei-lookup/src/main/resources/META-INF/services/org.ostelco.prime.imei.ImeiLookup new file mode 100644 index 000000000..ed9ad33f3 --- /dev/null +++ b/imei-lookup/src/main/resources/META-INF/services/org.ostelco.prime.imei.ImeiLookup @@ -0,0 +1 @@ +org.ostelco.prime.imei.imeilookup.ImeiSqliteDb \ No newline at end of file diff --git a/imei-lookup/src/main/resources/META-INF/services/org.ostelco.prime.module.PrimeModule b/imei-lookup/src/main/resources/META-INF/services/org.ostelco.prime.module.PrimeModule new file mode 100644 index 000000000..7ae579be0 --- /dev/null +++ b/imei-lookup/src/main/resources/META-INF/services/org.ostelco.prime.module.PrimeModule @@ -0,0 +1 @@ +org.ostelco.prime.imei.ImeiDb.ImeiLookupModule \ No newline at end of file diff --git a/imei-lookup/src/test/kotlin/org/ostelco/prime/imei/imeilookup/ImeiSqliteDbTest.kt b/imei-lookup/src/test/kotlin/org/ostelco/prime/imei/imeilookup/ImeiSqliteDbTest.kt new file mode 100644 index 000000000..fb3571aa1 --- /dev/null +++ b/imei-lookup/src/test/kotlin/org/ostelco/prime/imei/imeilookup/ImeiSqliteDbTest.kt @@ -0,0 +1,55 @@ +package org.ostelco.prime.imei.imeilookup + +import com.fasterxml.jackson.annotation.JsonProperty +import io.dropwizard.Application +import io.dropwizard.Configuration +import io.dropwizard.configuration.EnvironmentVariableSubstitutor +import io.dropwizard.configuration.SubstitutingSourceProvider +import io.dropwizard.setup.Bootstrap +import io.dropwizard.setup.Environment +import org.junit.Before +import org.junit.Test +import org.ostelco.prime.getLogger +import org.ostelco.prime.imei.ImeiLookup +import org.ostelco.prime.module.PrimeModule +import org.ostelco.prime.module.getResource +import kotlin.test.assertEquals + + +class TestApp : Application() { + + override fun initialize(bootstrap: Bootstrap) { + 
bootstrap.configurationSourceProvider = SubstitutingSourceProvider( + bootstrap.configurationSourceProvider, + EnvironmentVariableSubstitutor(false)) + } + + override fun run(configuration: TestConfig, environment: Environment) { + configuration.modules.forEach { it.init(environment) } + } +} + +class TestConfig: Configuration() { + + @JsonProperty + lateinit var modules: List +} + + +class ImeiSqliteDbTest { + + private val imeiLookup by lazy { getResource() } + + companion object { + init { + TestApp().run("server", "src/test/resources/config.yaml") + } + } + + @Test + fun getImeiResult() { + val result = imeiLookup.getImeiInformation("3550900831237501") + assertEquals(true, result.isRight()) + } + +} diff --git a/imei-lookup/src/test/resources/config.yaml b/imei-lookup/src/test/resources/config.yaml new file mode 100644 index 000000000..309e733db --- /dev/null +++ b/imei-lookup/src/test/resources/config.yaml @@ -0,0 +1,11 @@ +modules: +- type: Imei-lookup + config: + sqlite: + csvFile: test.txt +logging: + level: INFO + loggers: + org.ostelco: DEBUG + appenders: + - type: console \ No newline at end of file diff --git a/payment-processor/build.gradle b/payment-processor/build.gradle index 9c2069846..ff3d9c8b6 100644 --- a/payment-processor/build.gradle +++ b/payment-processor/build.gradle @@ -5,11 +5,8 @@ plugins { } sourceSets { - test { - java.srcDirs = ['src/test/kotlin'] - } integration { - java.srcDirs = ['src/test/kotlin', 'src/integration-tests/kotlin'] + java.srcDirs = ['src/integration-tests/kotlin'] resources.srcDir 'src/integration-tests/resources' compileClasspath += main.output + test.output runtimeClasspath += main.output + test.output diff --git a/payment-processor/src/integration-tests/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessorTest.kt b/payment-processor/src/integration-tests/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessorTest.kt index 4f1987749..d27422cbc 100644 --- 
a/payment-processor/src/integration-tests/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessorTest.kt +++ b/payment-processor/src/integration-tests/kotlin/org/ostelco/prime/paymentprocessor/StripePaymentProcessorTest.kt @@ -1,8 +1,6 @@ package org.ostelco.prime.paymentprocessor import arrow.core.getOrElse -import arrow.core.right -import arrow.core.some import com.stripe.Stripe import com.stripe.model.Source import com.stripe.model.Token diff --git a/prime-modules/src/main/kotlin/org/ostelco/prime/imei/ImeiLookup.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/imei/ImeiLookup.kt new file mode 100644 index 000000000..4414e7afa --- /dev/null +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/imei/ImeiLookup.kt @@ -0,0 +1,9 @@ +package org.ostelco.prime.imei + +import arrow.core.Either +import org.ostelco.prime.imei.core.Imei +import org.ostelco.prime.imei.core.ImeiLookupError + +interface ImeiLookup { + fun getImeiInformation(imeisv: String) : Either +} diff --git a/prime-modules/src/main/kotlin/org/ostelco/prime/imei/core/ImeiLookupError.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/imei/core/ImeiLookupError.kt new file mode 100644 index 000000000..6251ed020 --- /dev/null +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/imei/core/ImeiLookupError.kt @@ -0,0 +1,9 @@ +package org.ostelco.prime.imei.core + +import org.ostelco.prime.apierror.InternalError + +sealed class ImeiLookupError(val description: String, var externalErrorMessage : String? = null) : InternalError() + +class ImeaiNotFoundError(description: String, externalErrorMessage: String? = null) : ImeiLookupError(description, externalErrorMessage ) + +class BadGatewayError(description: String, externalErrorMessage: String? 
= null) : ImeiLookupError(description, externalErrorMessage) \ No newline at end of file diff --git a/prime-modules/src/main/kotlin/org/ostelco/prime/imei/core/Model.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/imei/core/Model.kt new file mode 100644 index 000000000..a069f001d --- /dev/null +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/imei/core/Model.kt @@ -0,0 +1,10 @@ +package org.ostelco.prime.imei.core + +data class Imei(val tac: String, + val marketingName: String, + val manufacturer: String, + val brandName: String, + val modelName: String, + val operatingSystem: String, + val deviceType: String, + val oem: String) \ No newline at end of file diff --git a/prime/build.gradle b/prime/build.gradle index 1a602b479..933f6faec 100644 --- a/prime/build.gradle +++ b/prime/build.gradle @@ -41,6 +41,7 @@ dependencies { runtimeOnly project(':payment-processor') runtimeOnly project(':analytics-module') runtimeOnly project(':slack') + runtimeOnly project(':imei-lookup') implementation "io.dropwizard:dropwizard-http2:$dropwizardVersion" runtimeOnly "io.dropwizard:dropwizard-json-logging:$dropwizardVersion" diff --git a/settings.gradle b/settings.gradle index fbd8d4601..2a16637bd 100644 --- a/settings.gradle +++ b/settings.gradle @@ -27,6 +27,8 @@ include ':prime-modules' include ':prime-client-api' include ':pseudonym-server' include ':slack' +include ':imei-lookup' + project(':acceptance-tests').projectDir = "$rootDir/acceptance-tests" as File @@ -56,3 +58,4 @@ project(':prime-modules').projectDir = "$rootDir/prime-modules" as File project(':prime-client-api').projectDir = "$rootDir/prime-client-api" as File project(':pseudonym-server').projectDir = "$rootDir/pseudonym-server" as File project(':slack').projectDir = "$rootDir/slack" as File +project(':imei-lookup').projectDir = "$rootDir/imei-lookup" as File From 864beef9afe8935a007e5ddcbecb2347c5c8f45a Mon Sep 17 00:00:00 2001 From: mpeterss Date: Tue, 9 Oct 2018 14:03:37 +0200 Subject: [PATCH 76/93] 
Reading TAC DB and query DB --- imei-lookup/config/testDB.txt | 3 + .../ostelco/prime/imei/imeilookup/ImeiDb.kt | 94 +++++++++++++++++++ .../prime/imei/imeilookup/ImeiLookupModule.kt | 14 +-- .../prime/imei/imeilookup/ImeiSqliteDb.kt | 28 ------ .../org.ostelco.prime.imei.ImeiLookup | 2 +- ...iSqliteDbTest.kt => ImeiInmemoryDbTest.kt} | 7 +- imei-lookup/src/test/resources/config.yaml | 3 +- .../prime/imei/core/ImeiLookupError.kt | 4 +- 8 files changed, 109 insertions(+), 46 deletions(-) create mode 100644 imei-lookup/config/testDB.txt create mode 100644 imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiDb.kt delete mode 100644 imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiSqliteDb.kt rename imei-lookup/src/test/kotlin/org/ostelco/prime/imei/imeilookup/{ImeiSqliteDbTest.kt => ImeiInmemoryDbTest.kt} (90%) diff --git a/imei-lookup/config/testDB.txt b/imei-lookup/config/testDB.txt new file mode 100644 index 000000000..648cf2590 --- /dev/null +++ b/imei-lookup/config/testDB.txt @@ -0,0 +1,3 @@ +TAC|Marketing Name|Manufacturer|Bands|Allocation Date|Country Code|Fixed Code|Manufacturer Code|Radio Interface|Brand Name|Model Name|Operating System|NFC|Bluetooth|WLAN|Device Type|OEM|Removable UICC|Removable EUICC|NonRemovable UICC|NonRemovable EUICC|LPWAN +00100732|This is a Test IMEI to be used with multiple prototype models. The frequency bands for each model may not match what is listed in this record|Test|GSM 1900|07-Jul-1998|208|MANU|205079|NONE|Not Known|This is a Test IMEI to be used with multiple prototype models. The frequency bands for each model may not match what is listed in this record|Not Known|Not Known|Not Known|Not Known|Handheld|Not Known|Not Known|Not Known|Not Known|Not Known|Not Known +00100931|This is a Test IMEI to be used with multiple prototype models. 
The frequency bands for each model may not match what is listed in this record|Test 2|GSM 1900|07-Jul-1998|310|MANU|205092|NONE|Not Known|This is a Test IMEI to be used with multiple prototype models. The frequency bands for each model may not match what is listed in this record|Not Known|Not Known|Not Known|Not Known|Handheld|Not Known|Not Known|Not Known|Not Known|Not Known|Not Known diff --git a/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiDb.kt b/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiDb.kt new file mode 100644 index 000000000..2fb36a76e --- /dev/null +++ b/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiDb.kt @@ -0,0 +1,94 @@ +package org.ostelco.prime.imei.imeilookup + +import arrow.core.Either +import org.ostelco.prime.getLogger +import org.ostelco.prime.imei.ImeiLookup +import org.ostelco.prime.imei.core.BadRequestError +import org.ostelco.prime.imei.core.Imei +import org.ostelco.prime.imei.core.ImeiLookupError +import org.ostelco.prime.imei.core.ImeiNotFoundError +import java.io.BufferedReader +import java.io.FileReader +import java.io.IOException + + +/** + * In memory implementation of the IMEI lookup service + */ +class ImeiDb : ImeiLookup by ImeiDdSingleton { + + object ImeiDdSingleton : ImeiLookup { + + private val TAC_IDX = 0 + private val MARKETING_NAME_IDX = 1 + private val MANUFACTURER_IDX = 2 + private val BRAND_NAME_IDX = 9 + private val MODEL_NAME_IDX = 10 + private val OPERATING_SYSTEM_IDX = 11 + private val DEVICE_TYPE_IDX = 15 + private val OEM_IDX = 16 + + private val logger by getLogger() + + val db = HashMap() + + init { + logger.info("Singleton created") + } + + override fun getImeiInformation(imei: String): Either { + + if ( !(15 <= imei.length) && (imei.length <= 16) ) { + return Either.left(BadRequestError("Malformed IMEI. 
Size should be 15 digit for IMEI or 16 digit for IMEISV")) + } + + val tac = imei.substring(0,8) + + val imeiInformation = db.get(tac) + if (imeiInformation != null) { + return Either.right(imeiInformation) + } + return Either.left(ImeiNotFoundError("Not implemented jet")) + } + + fun loadFile(fileName: String): Either { + logger.info("Loading file $fileName") + + var fileReader: BufferedReader? = null + + try { + fileReader = BufferedReader(FileReader(fileName)) + + // Read CSV header + fileReader.readLine() + + var line = fileReader.readLine() + while (line != null) { + val tokens = line.split("|") + if (tokens.size > 0) { + val imei = Imei( + tokens[TAC_IDX], + tokens[MARKETING_NAME_IDX], + tokens[MANUFACTURER_IDX], + tokens[BRAND_NAME_IDX], + tokens[MODEL_NAME_IDX], + tokens[OPERATING_SYSTEM_IDX], + tokens[DEVICE_TYPE_IDX], + tokens[OEM_IDX]) + db.put(imei.tac, imei) + } + line = fileReader.readLine() + } + } catch (e: Exception) { + logger.error("Reading CSV Error!", e) + } finally { + try { + fileReader!!.close() + } catch (e: IOException) { + logger.error("Closing fileReader Error!", e) + } + } + return Either.right(true) + } + } +} diff --git a/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt b/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt index 14657a11a..6810f08d6 100644 --- a/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt +++ b/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt @@ -5,6 +5,7 @@ import com.fasterxml.jackson.annotation.JsonTypeName import io.dropwizard.setup.Environment import org.ostelco.prime.getLogger import org.ostelco.prime.module.PrimeModule +import org.ostelco.prime.imei.imeilookup.ImeiDb @JsonTypeName("Imei-lookup") @@ -16,19 +17,14 @@ class ImeiLookupModule : PrimeModule { var config: Config? 
= null override fun init(env: Environment) { - - logger.info("ImeiLookupModule env: $env") - logger.info("CSV file set to ${config?.imeiLookupConfig?.csvFile}") + + val fileName = config?.csvFile ?: "" + logger.info("CSV file set to $fileName") + ImeiDb.ImeiDdSingleton.loadFile(fileName); } } - class Config { - @JsonProperty("sqlite") - lateinit var imeiLookupConfig: ImeiLookupConfig -} - -class ImeiLookupConfig { @JsonProperty var csvFile: String = "default.txt" } \ No newline at end of file diff --git a/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiSqliteDb.kt b/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiSqliteDb.kt deleted file mode 100644 index 618e7f7a4..000000000 --- a/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiSqliteDb.kt +++ /dev/null @@ -1,28 +0,0 @@ -package org.ostelco.prime.imei.imeilookup - -import arrow.core.Either -import org.ostelco.prime.getLogger -import org.ostelco.prime.imei.ImeiLookup -import org.ostelco.prime.imei.core.Imei -import org.ostelco.prime.imei.core.ImeiLookupError -import org.ostelco.prime.imei.core.ImeaiNotFoundError - - -/** - * SQLite implementation of the IMEI lookup service - */ -class ImeiSqliteDb : ImeiLookup by jdbcSingleton { - - object jdbcSingleton : ImeiLookup { - - private val logger by getLogger() - - init { - logger.info("Singleton created") - } - - override fun getImeiInformation(imeisv: String): Either { - return Either.left(ImeaiNotFoundError("Not implemented jet")) - } - } -} diff --git a/imei-lookup/src/main/resources/META-INF/services/org.ostelco.prime.imei.ImeiLookup b/imei-lookup/src/main/resources/META-INF/services/org.ostelco.prime.imei.ImeiLookup index ed9ad33f3..790020cc7 100644 --- a/imei-lookup/src/main/resources/META-INF/services/org.ostelco.prime.imei.ImeiLookup +++ b/imei-lookup/src/main/resources/META-INF/services/org.ostelco.prime.imei.ImeiLookup @@ -1 +1 @@ -org.ostelco.prime.imei.imeilookup.ImeiSqliteDb \ No newline at end of file 
+org.ostelco.prime.imei.imeilookup.ImeiDb \ No newline at end of file diff --git a/imei-lookup/src/test/kotlin/org/ostelco/prime/imei/imeilookup/ImeiSqliteDbTest.kt b/imei-lookup/src/test/kotlin/org/ostelco/prime/imei/imeilookup/ImeiInmemoryDbTest.kt similarity index 90% rename from imei-lookup/src/test/kotlin/org/ostelco/prime/imei/imeilookup/ImeiSqliteDbTest.kt rename to imei-lookup/src/test/kotlin/org/ostelco/prime/imei/imeilookup/ImeiInmemoryDbTest.kt index fb3571aa1..48d84821f 100644 --- a/imei-lookup/src/test/kotlin/org/ostelco/prime/imei/imeilookup/ImeiSqliteDbTest.kt +++ b/imei-lookup/src/test/kotlin/org/ostelco/prime/imei/imeilookup/ImeiInmemoryDbTest.kt @@ -7,9 +7,7 @@ import io.dropwizard.configuration.EnvironmentVariableSubstitutor import io.dropwizard.configuration.SubstitutingSourceProvider import io.dropwizard.setup.Bootstrap import io.dropwizard.setup.Environment -import org.junit.Before import org.junit.Test -import org.ostelco.prime.getLogger import org.ostelco.prime.imei.ImeiLookup import org.ostelco.prime.module.PrimeModule import org.ostelco.prime.module.getResource @@ -36,7 +34,7 @@ class TestConfig: Configuration() { } -class ImeiSqliteDbTest { +class ImeiInmemoryDbTest { private val imeiLookup by lazy { getResource() } @@ -48,8 +46,7 @@ class ImeiSqliteDbTest { @Test fun getImeiResult() { - val result = imeiLookup.getImeiInformation("3550900831237501") + val result = imeiLookup.getImeiInformation("0010073231237501") assertEquals(true, result.isRight()) } - } diff --git a/imei-lookup/src/test/resources/config.yaml b/imei-lookup/src/test/resources/config.yaml index 309e733db..8e2468d9d 100644 --- a/imei-lookup/src/test/resources/config.yaml +++ b/imei-lookup/src/test/resources/config.yaml @@ -1,8 +1,7 @@ modules: - type: Imei-lookup config: - sqlite: - csvFile: test.txt + csvFile: config/testDB.txt logging: level: INFO loggers: diff --git a/prime-modules/src/main/kotlin/org/ostelco/prime/imei/core/ImeiLookupError.kt 
b/prime-modules/src/main/kotlin/org/ostelco/prime/imei/core/ImeiLookupError.kt index 6251ed020..1b9303898 100644 --- a/prime-modules/src/main/kotlin/org/ostelco/prime/imei/core/ImeiLookupError.kt +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/imei/core/ImeiLookupError.kt @@ -4,6 +4,8 @@ import org.ostelco.prime.apierror.InternalError sealed class ImeiLookupError(val description: String, var externalErrorMessage : String? = null) : InternalError() -class ImeaiNotFoundError(description: String, externalErrorMessage: String? = null) : ImeiLookupError(description, externalErrorMessage ) +class ImeiNotFoundError(description: String, externalErrorMessage: String? = null) : ImeiLookupError(description, externalErrorMessage ) + +class BadRequestError(description: String, externalErrorMessage: String? = null) : ImeiLookupError(description, externalErrorMessage ) class BadGatewayError(description: String, externalErrorMessage: String? = null) : ImeiLookupError(description, externalErrorMessage) \ No newline at end of file From f8d4eac1691dca8b105c1cb7f78680dfa55b63b8 Mon Sep 17 00:00:00 2001 From: mpeterss Date: Tue, 9 Oct 2018 14:10:46 +0200 Subject: [PATCH 77/93] Fix package name --- .../org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt | 5 ++--- .../META-INF/services/org.ostelco.prime.module.PrimeModule | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt b/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt index 6810f08d6..f7ef3544d 100644 --- a/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt +++ b/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt @@ -1,11 +1,10 @@ -package org.ostelco.prime.imei.ImeiDb +package org.ostelco.prime.imei.imeilookup import com.fasterxml.jackson.annotation.JsonProperty import com.fasterxml.jackson.annotation.JsonTypeName import 
io.dropwizard.setup.Environment import org.ostelco.prime.getLogger import org.ostelco.prime.module.PrimeModule -import org.ostelco.prime.imei.imeilookup.ImeiDb @JsonTypeName("Imei-lookup") @@ -17,7 +16,7 @@ class ImeiLookupModule : PrimeModule { var config: Config? = null override fun init(env: Environment) { - + val fileName = config?.csvFile ?: "" logger.info("CSV file set to $fileName") ImeiDb.ImeiDdSingleton.loadFile(fileName); diff --git a/imei-lookup/src/main/resources/META-INF/services/org.ostelco.prime.module.PrimeModule b/imei-lookup/src/main/resources/META-INF/services/org.ostelco.prime.module.PrimeModule index 7ae579be0..c158dfa3b 100644 --- a/imei-lookup/src/main/resources/META-INF/services/org.ostelco.prime.module.PrimeModule +++ b/imei-lookup/src/main/resources/META-INF/services/org.ostelco.prime.module.PrimeModule @@ -1 +1 @@ -org.ostelco.prime.imei.ImeiDb.ImeiLookupModule \ No newline at end of file +org.ostelco.prime.imei.imeilookup.ImeiLookupModule \ No newline at end of file From 3ec2bdfdff242bbc2803a732fa8d17f648111e9a Mon Sep 17 00:00:00 2001 From: mpeterss Date: Tue, 9 Oct 2018 14:16:57 +0200 Subject: [PATCH 78/93] Removed unused library --- imei-lookup/build.gradle | 1 - 1 file changed, 1 deletion(-) diff --git a/imei-lookup/build.gradle b/imei-lookup/build.gradle index b578bdc39..7fd551beb 100644 --- a/imei-lookup/build.gradle +++ b/imei-lookup/build.gradle @@ -7,7 +7,6 @@ plugins { dependencies { implementation project(":prime-modules") implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion" - implementation "io.dropwizard:dropwizard-jdbi3:$dropwizardVersion" testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" From 9d4679c985413e47af5071a603101a8163d2a3e6 Mon Sep 17 00:00:00 2001 From: mpeterss Date: Tue, 9 Oct 2018 15:01:07 +0200 Subject: [PATCH 79/93] Move csv file --- .../ostelco/prime/imei/imeilookup/ImeiDb.kt | 119 +++++++++--------- .../prime/imei/imeilookup/ImeiLookupModule.kt | 12 +- 
imei-lookup/src/test/resources/config.yaml | 2 +- .../test/resources/testDB.csv} | 0 prime/Dockerfile.test | 3 +- prime/config/config.yaml | 3 + prime/config/test.yaml | 3 + prime/config/testDB.csv | 3 + prime/infra/dev/prime.yaml | 6 + 9 files changed, 78 insertions(+), 73 deletions(-) rename imei-lookup/{config/testDB.txt => src/test/resources/testDB.csv} (100%) create mode 100644 prime/config/testDB.csv diff --git a/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiDb.kt b/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiDb.kt index 2fb36a76e..4d7c95adf 100644 --- a/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiDb.kt +++ b/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiDb.kt @@ -15,80 +15,75 @@ import java.io.IOException /** * In memory implementation of the IMEI lookup service */ -class ImeiDb : ImeiLookup by ImeiDdSingleton { +class ImeiDb : ImeiLookup by ImeiDdSingleton - object ImeiDdSingleton : ImeiLookup { +object ImeiDdSingleton : ImeiLookup { - private val TAC_IDX = 0 - private val MARKETING_NAME_IDX = 1 - private val MANUFACTURER_IDX = 2 - private val BRAND_NAME_IDX = 9 - private val MODEL_NAME_IDX = 10 - private val OPERATING_SYSTEM_IDX = 11 - private val DEVICE_TYPE_IDX = 15 - private val OEM_IDX = 16 + private val TAC_IDX = 0 + private val MARKETING_NAME_IDX = 1 + private val MANUFACTURER_IDX = 2 + private val BRAND_NAME_IDX = 9 + private val MODEL_NAME_IDX = 10 + private val OPERATING_SYSTEM_IDX = 11 + private val DEVICE_TYPE_IDX = 15 + private val OEM_IDX = 16 - private val logger by getLogger() + private val logger by getLogger() - val db = HashMap() + val db = HashMap() - init { - logger.info("Singleton created") - } - - override fun getImeiInformation(imei: String): Either { - - if ( !(15 <= imei.length) && (imei.length <= 16) ) { - return Either.left(BadRequestError("Malformed IMEI. 
Size should be 15 digit for IMEI or 16 digit for IMEISV")) - } - - val tac = imei.substring(0,8) + override fun getImeiInformation(imei: String): Either { - val imeiInformation = db.get(tac) - if (imeiInformation != null) { - return Either.right(imeiInformation) - } - return Either.left(ImeiNotFoundError("Not implemented jet")) + if (!(15 <= imei.length) && (imei.length <= 16)) { + return Either.left(BadRequestError("Malformed IMEI. Size should be 15 digit for IMEI or 16 digit for IMEISV")) } - fun loadFile(fileName: String): Either { - logger.info("Loading file $fileName") + val tac = imei.substring(0, 8) - var fileReader: BufferedReader? = null + val imeiInformation = db.get(tac) + if (imeiInformation != null) { + return Either.right(imeiInformation) + } + return Either.left(ImeiNotFoundError("Not implemented jet")) + } - try { - fileReader = BufferedReader(FileReader(fileName)) - - // Read CSV header - fileReader.readLine() - - var line = fileReader.readLine() - while (line != null) { - val tokens = line.split("|") - if (tokens.size > 0) { - val imei = Imei( - tokens[TAC_IDX], - tokens[MARKETING_NAME_IDX], - tokens[MANUFACTURER_IDX], - tokens[BRAND_NAME_IDX], - tokens[MODEL_NAME_IDX], - tokens[OPERATING_SYSTEM_IDX], - tokens[DEVICE_TYPE_IDX], - tokens[OEM_IDX]) - db.put(imei.tac, imei) - } - line = fileReader.readLine() - } - } catch (e: Exception) { - logger.error("Reading CSV Error!", e) - } finally { - try { - fileReader!!.close() - } catch (e: IOException) { - logger.error("Closing fileReader Error!", e) + fun loadFile(fileName: String): Either { + logger.info("Loading file $fileName") + + var fileReader: BufferedReader? 
= null + + try { + fileReader = BufferedReader(FileReader(fileName)) + + // Read CSV header + fileReader.readLine() + + var line = fileReader.readLine() + while (line != null) { + val tokens = line.split("|") + if (tokens.size > 0) { + val imei = Imei( + tokens[TAC_IDX], + tokens[MARKETING_NAME_IDX], + tokens[MANUFACTURER_IDX], + tokens[BRAND_NAME_IDX], + tokens[MODEL_NAME_IDX], + tokens[OPERATING_SYSTEM_IDX], + tokens[DEVICE_TYPE_IDX], + tokens[OEM_IDX]) + db.put(imei.tac, imei) } + line = fileReader.readLine() + } + } catch (e: Exception) { + logger.error("Reading CSV Error!", e) + } finally { + try { + fileReader!!.close() + } catch (e: IOException) { + logger.error("Closing fileReader Error!", e) } - return Either.right(true) } + return Either.right(true) } } diff --git a/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt b/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt index f7ef3544d..3e1ce82bd 100644 --- a/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt +++ b/imei-lookup/src/main/kotlin/org/ostelco/prime/imei/imeilookup/ImeiLookupModule.kt @@ -3,27 +3,21 @@ package org.ostelco.prime.imei.imeilookup import com.fasterxml.jackson.annotation.JsonProperty import com.fasterxml.jackson.annotation.JsonTypeName import io.dropwizard.setup.Environment -import org.ostelco.prime.getLogger import org.ostelco.prime.module.PrimeModule @JsonTypeName("Imei-lookup") class ImeiLookupModule : PrimeModule { - private val logger by getLogger() - @JsonProperty - var config: Config? 
= null + lateinit var config: Config override fun init(env: Environment) { - - val fileName = config?.csvFile ?: "" - logger.info("CSV file set to $fileName") - ImeiDb.ImeiDdSingleton.loadFile(fileName); + ImeiDdSingleton.loadFile(config.csvFile); } } class Config { @JsonProperty - var csvFile: String = "default.txt" + lateinit var csvFile: String } \ No newline at end of file diff --git a/imei-lookup/src/test/resources/config.yaml b/imei-lookup/src/test/resources/config.yaml index 8e2468d9d..137e87cc5 100644 --- a/imei-lookup/src/test/resources/config.yaml +++ b/imei-lookup/src/test/resources/config.yaml @@ -1,7 +1,7 @@ modules: - type: Imei-lookup config: - csvFile: config/testDB.txt + csvFile: src/test/resources/testDB.csv logging: level: INFO loggers: diff --git a/imei-lookup/config/testDB.txt b/imei-lookup/src/test/resources/testDB.csv similarity index 100% rename from imei-lookup/config/testDB.txt rename to imei-lookup/src/test/resources/testDB.csv diff --git a/prime/Dockerfile.test b/prime/Dockerfile.test index 7e088682f..33c98ff2d 100644 --- a/prime/Dockerfile.test +++ b/prime/Dockerfile.test @@ -15,8 +15,9 @@ COPY script/start.sh /start.sh COPY script/wait.sh /wait.sh # test.yaml is copied as config.yaml for AT. 
-COPY config/test.yaml /config/config.yaml COPY config/pantel-prod.json /secret/pantel-prod.json +COPY config/testDb.csv /imei/imeiDb.csv +COPY config/test.yaml /config/config.yaml COPY build/libs/prime-uber.jar /prime.jar diff --git a/prime/config/config.yaml b/prime/config/config.yaml index d26cd9028..b60c0077c 100644 --- a/prime/config/config.yaml +++ b/prime/config/config.yaml @@ -7,6 +7,9 @@ modules: httpClient: timeout: 3s connectionRequestTimeout: 1s +- type: Imei-lookup + config: + csvFile: secret/imeiDb.txt - type: firebase config: configFile: /secret/pantel-prod.json diff --git a/prime/config/test.yaml b/prime/config/test.yaml index 31e7c406d..0ddeb85ed 100644 --- a/prime/config/test.yaml +++ b/prime/config/test.yaml @@ -5,6 +5,9 @@ modules: config: configFile: /secret/pantel-prod.json rootPath: test +- type: Imei-lookup + config: + csvFile: /imei/imeiDb.csv - type: neo4j config: host: neo4j diff --git a/prime/config/testDB.csv b/prime/config/testDB.csv new file mode 100644 index 000000000..648cf2590 --- /dev/null +++ b/prime/config/testDB.csv @@ -0,0 +1,3 @@ +TAC|Marketing Name|Manufacturer|Bands|Allocation Date|Country Code|Fixed Code|Manufacturer Code|Radio Interface|Brand Name|Model Name|Operating System|NFC|Bluetooth|WLAN|Device Type|OEM|Removable UICC|Removable EUICC|NonRemovable UICC|NonRemovable EUICC|LPWAN +00100732|This is a Test IMEI to be used with multiple prototype models. The frequency bands for each model may not match what is listed in this record|Test|GSM 1900|07-Jul-1998|208|MANU|205079|NONE|Not Known|This is a Test IMEI to be used with multiple prototype models. The frequency bands for each model may not match what is listed in this record|Not Known|Not Known|Not Known|Not Known|Handheld|Not Known|Not Known|Not Known|Not Known|Not Known|Not Known +00100931|This is a Test IMEI to be used with multiple prototype models. 
The frequency bands for each model may not match what is listed in this record|Test 2|GSM 1900|07-Jul-1998|310|MANU|205092|NONE|Not Known|This is a Test IMEI to be used with multiple prototype models. The frequency bands for each model may not match what is listed in this record|Not Known|Not Known|Not Known|Not Known|Handheld|Not Known|Not Known|Not Known|Not Known|Not Known|Not Known diff --git a/prime/infra/dev/prime.yaml b/prime/infra/dev/prime.yaml index 3d07dfae9..4a7b4e2d6 100644 --- a/prime/infra/dev/prime.yaml +++ b/prime/infra/dev/prime.yaml @@ -173,6 +173,9 @@ spec: - name: secret-config mountPath: "/secret" readOnly: true + - name: imei-db + mountPath: "/imei" + readOnly: true ports: - containerPort: 8080 - containerPort: 8081 @@ -182,6 +185,9 @@ spec: - name: secret-config secret: secretName: pantel-prod.json + - type: Imei-lookup + config: + csvFile: /imei/imeiDb.csv - name: api-ostelco-ssl secret: secretName: api-ostelco-ssl From f2ec8e66a1764432ac8f8fd112988996d30c762b Mon Sep 17 00:00:00 2001 From: mpeterss Date: Tue, 9 Oct 2018 15:51:42 +0200 Subject: [PATCH 80/93] Rename testDb --- prime/config/{testDB.csv => testDb.csv} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename prime/config/{testDB.csv => testDb.csv} (100%) diff --git a/prime/config/testDB.csv b/prime/config/testDb.csv similarity index 100% rename from prime/config/testDB.csv rename to prime/config/testDb.csv From bc86cfe0de1b254a95a6f03519170ba008811689 Mon Sep 17 00:00:00 2001 From: "Kjell M. 
Myksvoll" Date: Mon, 8 Oct 2018 15:41:39 +0200 Subject: [PATCH 81/93] Adds HTTP request tracking to logs --- .../prime/client/api/ClientApiModule.kt | 4 +++ .../logging/TrackRequestsLoggingFilter.kt | 34 +++++++++++++++++++ 2 files changed, 38 insertions(+) create mode 100644 prime-modules/src/main/kotlin/org/ostelco/prime/logging/TrackRequestsLoggingFilter.kt diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt index 52fdad641..228a0a413 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt @@ -13,6 +13,7 @@ import io.dropwizard.setup.Environment import org.eclipse.jetty.servlets.CrossOriginFilter import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.auth.OAuthAuthenticator +import org.ostelco.prime.logging.TrackRequestsLoggingFilter import org.ostelco.prime.client.api.metrics.reportMetricsAtStartUp import org.ostelco.prime.client.api.resources.AnalyticsResource import org.ostelco.prime.client.api.resources.ApplicationTokenResource @@ -69,6 +70,9 @@ class ClientApiModule : PrimeModule { .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)) .build(env.name) + /* Add filters/interceptors. */ + jerseyEnv.register(TrackRequestsLoggingFilter()) + /* APIs. 
*/ jerseyEnv.register(AnalyticsResource(dao)) jerseyEnv.register(ConsentsResource(dao)) diff --git a/prime-modules/src/main/kotlin/org/ostelco/prime/logging/TrackRequestsLoggingFilter.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/logging/TrackRequestsLoggingFilter.kt new file mode 100644 index 000000000..ac96a3b3b --- /dev/null +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/logging/TrackRequestsLoggingFilter.kt @@ -0,0 +1,34 @@ +package org.ostelco.prime.logging + +import javax.ws.rs.container.ContainerRequestContext +import javax.ws.rs.container.ContainerRequestFilter +import javax.ws.rs.container.ContainerResponseContext +import javax.ws.rs.container.ContainerResponseFilter +import org.slf4j.MDC +import java.util.UUID +import javax.ws.rs.ext.Provider + +/** + * Add an unique id to each request simplyfying tracking of requests in logs. + */ +@Provider +class TrackRequestsLoggingFilter : ContainerRequestFilter, ContainerResponseFilter { + + /* Commonly used HTTP header for tracing requests. */ + val REQUEST_TRACE_HEADER = "X-RequestTrace" + + /* MDC tracking. */ + val TRACE_ID = "TraceId" + + override fun filter(ctx: ContainerRequestContext) { + val traceHeader = ctx.getHeaderString(REQUEST_TRACE_HEADER) + MDC.put("InvocationId", if (!traceHeader.isNullOrBlank()) + traceHeader + else + UUID.randomUUID().toString()) + } + + override fun filter(reqCtx: ContainerRequestContext, rspCtx: ContainerResponseContext) { + MDC.remove(TRACE_ID) + } +} From 615d51d5951252276d57e6ce2919b4195c116c5f Mon Sep 17 00:00:00 2001 From: "Kjell M. 
Myksvoll" Date: Thu, 11 Oct 2018 11:25:39 +0200 Subject: [PATCH 82/93] Updates 'active users' metrics extracted from BQ Updated/new metrics: - active users last 24 hours - active users yesterday - active users today The standard Firebase events: - session_start : user engages with the app - screen_view : user switches between screens in app - user_engagement : periodic events when app is in foreground are used to determine whether an user is active or not. As all of the events are subject to some limitations (see ref. below) all of are included in the "active user" detection. Ref.: https://support.google.com/firebase/answer/6317485 --- bq-metrics-extractor/config/config.yaml | 26 +++++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/bq-metrics-extractor/config/config.yaml b/bq-metrics-extractor/config/config.yaml index a125f31d7..b1f9ffeaa 100644 --- a/bq-metrics-extractor/config/config.yaml +++ b/bq-metrics-extractor/config/config.yaml @@ -10,13 +10,31 @@ logging: level: severity bqmetrics: - - type: summary - name: active_users - help: Number of active users + - type: gauge + name: active_app_users_last24hours + help: Number of active application users last 24 hours + resultColumn: count + sql: > + SELECT count(distinct user_pseudo_id) AS count FROM `${DATASET_PROJECT}.analytics_160712959.events_*` + WHERE (event_name = "session_start" OR event_name = "screen_view" OR event_name = "user_engagement") + AND timestamp_micros(event_timestamp) >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 DAY) + - type: gauge + name: active_app_users_yesterday + help: Number of active application users yesterday + resultColumn: count + sql: > + SELECT count(distinct user_pseudo_id) AS count FROM `${DATASET_PROJECT}.analytics_160712959.events_*` + WHERE (event_name = "session_start" OR event_name = "screen_view" OR event_name = "user_engagement") + AND timestamp_micros(event_timestamp) >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 DAY) + AND 
timestamp_micros(event_timestamp) < TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) + - type: gauge + name: active_app_users_today + help: Number of active application users today resultColumn: count sql: > SELECT count(distinct user_pseudo_id) AS count FROM `${DATASET_PROJECT}.analytics_160712959.events_*` - WHERE event_name = "first_open" + WHERE (event_name = "session_start" OR event_name = "screen_view" OR event_name = "user_engagement") + AND timestamp_micros(event_timestamp) >= TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY) - type: gauge name: sims_who_have_used_data help: Number of SIMs that has used data last 24 hours From 51bd2658b0a67ad91566515296e4c1f69a99d7ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Remseth?= Date: Thu, 11 Oct 2018 11:35:15 +0200 Subject: [PATCH 83/93] Fix bugs --- sample-agent/{apply-yaml.sh => apply_yaml.sh} | 0 sample-agent/generate-test-scripts.sh | 12 ++++++------ 2 files changed, 6 insertions(+), 6 deletions(-) rename sample-agent/{apply-yaml.sh => apply_yaml.sh} (100%) diff --git a/sample-agent/apply-yaml.sh b/sample-agent/apply_yaml.sh similarity index 100% rename from sample-agent/apply-yaml.sh rename to sample-agent/apply_yaml.sh diff --git a/sample-agent/generate-test-scripts.sh b/sample-agent/generate-test-scripts.sh index 4b47c29cf..6273a1ed1 100755 --- a/sample-agent/generate-test-scripts.sh +++ b/sample-agent/generate-test-scripts.sh @@ -130,15 +130,15 @@ EOF echo "$0: INFO Successfully created demo scripts in directyory $TARGET_DIR" echo "$0: INFO To initialize run initialization scripts:" echo "$0: INFO" -echo "$0: INFO apply_yaml.sh offer $TARGET_DIR/init1.yml" -echo "$0: INFO apply_yaml.sh offer $TARGET_DIR/init2.yml" -echo "$0: INFO apply_yaml.sh offer $TARGET_DIR/init3.yml" +echo "$0: INFO ./apply_yaml.sh offer $TARGET_DIR/init1.yml" +echo "$0: INFO ./apply_yaml.sh offer $TARGET_DIR/init2.yml" +echo "$0: INFO ./apply_yaml.sh offer $TARGET_DIR/init3.yml" echo "$0: INFO" echo "$0: INFO During the test, run the test 
steps:" echo "$0: INFO" -echo "$0: INFO apply_yaml.sh segments $TARGET_DIR/step1.yml" -echo "$0: INFO apply_yaml.sh segments $TARGET_DIR/setep2.yml" +echo "$0: INFO ./apply_yaml.sh segments $TARGET_DIR/step1.yml" +echo "$0: INFO ./apply_yaml.sh segments $TARGET_DIR/step2.yml" echo "$0: INFO" echo "$0: INFO To reset to initial state (e.g. before running a demo/test again):" echo "$0: INFO" -echo "$0: INFO apply_yaml.sh segments $TARGET_DIR/reset.yml" +echo "$0: INFO ./apply_yaml.sh segments $TARGET_DIR/reset.yml" From 770b9285567a0dcfef83a33b07bd6115f75ecd12 Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Thu, 11 Oct 2018 13:16:41 +0200 Subject: [PATCH 84/93] Changes in presentation properties of products --- sample-agent/generate-test-scripts.sh | 3 +++ tools/neo4j-admin-tools/src/main/resources/init.cypher | 1 - 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/sample-agent/generate-test-scripts.sh b/sample-agent/generate-test-scripts.sh index 6273a1ed1..108e13964 100755 --- a/sample-agent/generate-test-scripts.sh +++ b/sample-agent/generate-test-scripts.sh @@ -50,6 +50,7 @@ createOffer: isDefault: true offerLabel: Top Up priceLabel: 200 NOK + productLabel: +1GB createSegments: - id: $SEGMENT_1 EOF @@ -69,6 +70,7 @@ createOffer: isDefault: true offerLabel: Top Up priceLabel: 200 NOK + productLabel: +2GB createSegments: - id: $SEGMENT_2 EOF @@ -87,6 +89,7 @@ createOffer: isDefault: true offerLabel: Special offer priceLabel: 50 NOK + productLabel: +1GB createSegments: - id: $SEGMENT_3 EOF diff --git a/tools/neo4j-admin-tools/src/main/resources/init.cypher b/tools/neo4j-admin-tools/src/main/resources/init.cypher index 0dace3e4c..53bf0113e 100644 --- a/tools/neo4j-admin-tools/src/main/resources/init.cypher +++ b/tools/neo4j-admin-tools/src/main/resources/init.cypher @@ -78,7 +78,6 @@ CREATE (:Product {`id`: '1GB_1SGD', CREATE (:Product {`id`: '3GB_1.5SGD', `presentation/isDefault`: 'true', - `presentation/isOffer`: 'true', `presentation/offerLabel`: 
'Default Offer', `presentation/priceLabel`: '1.5 SGD', `presentation/productLabel`: '+3GB', From 49f216e4a757cb2c617f746adb0f94747de709bb Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Thu, 11 Oct 2018 13:19:38 +0200 Subject: [PATCH 85/93] More fixes in presentation properties --- tools/neo4j-admin-tools/src/main/resources/init.cypher | 1 - 1 file changed, 1 deletion(-) diff --git a/tools/neo4j-admin-tools/src/main/resources/init.cypher b/tools/neo4j-admin-tools/src/main/resources/init.cypher index 53bf0113e..545f186fe 100644 --- a/tools/neo4j-admin-tools/src/main/resources/init.cypher +++ b/tools/neo4j-admin-tools/src/main/resources/init.cypher @@ -77,7 +77,6 @@ CREATE (:Product {`id`: '1GB_1SGD', `sku`: '1GB_1SGD'}); CREATE (:Product {`id`: '3GB_1.5SGD', - `presentation/isDefault`: 'true', `presentation/offerLabel`: 'Default Offer', `presentation/priceLabel`: '1.5 SGD', `presentation/productLabel`: '+3GB', From 7f5a2a820770ba81685731a4ca1c71ca965028bd Mon Sep 17 00:00:00 2001 From: "Kjell M. Myksvoll" Date: Wed, 10 Oct 2018 08:58:33 +0200 Subject: [PATCH 86/93] Removes stale client API docs --- client-api/README.md | 663 +----------------- client-api/diagrams/signin-flow.svg | 22 - client-api/generate-diagrams.sh | 3 - .../images/user-subscription-data-model.svg | 4 - client-api/puml/signin-flow.puml | 9 - 5 files changed, 2 insertions(+), 699 deletions(-) delete mode 100644 client-api/diagrams/signin-flow.svg delete mode 100755 client-api/generate-diagrams.sh delete mode 100644 client-api/images/user-subscription-data-model.svg delete mode 100644 client-api/puml/signin-flow.puml diff --git a/client-api/README.md b/client-api/README.md index e46ec2808..073d2afdb 100644 --- a/client-api/README.md +++ b/client-api/README.md @@ -1,662 +1,3 @@ -# Rest API for the "project pi" client +# Module Client API -This document describes the API between the client and the backend. 
- - - * [Introduction](#introduction) - * [Data model](#data-model) - * [Common for all API methods](#common-for-all-api-methods) - * [Preferred language](#preferred-language) - * [Language indication sent from client](#language-indication-sent-from-client) - * [Format multi-language text strings sent in responses to client](#format-multi-language-text-strings-sent-in-reponses-to-client) - * [HTTP status codes and error reporting](#http-status-codes-and-error-reporting) - * [If a request gives no error](#if-a-request-gives-no-error) - * [If a request results in an error (Bad Request or Forbidden)](#if-a-request-results-in-an-error-bad-request-or-forbidden) - * [Unknown path (Not Found)](#unknown-path-not-found) - * [Server side errors](#server-side-errors) - * [The API](#the-api) - * [Sign up and authentication](#sign-up-and-authentication) - * [Register personal information](#register-personal-information) - * [Authenticate using verification code](#authenticate-using-verification-code) - * [Refreshing the access token](#refreshing-the-access-token) - * [Sign in](#sign-in) - * [User profile](#user-profile) - * [Fetch profile](#fetch-profile) - * [Update profile](#update-profile) - * [Subscriptions](#subscriptions) - * [Get subscription status](#get-subscription-status) - * [Offers](#offers) - * [Get list of new offers](#get-list-of-new-offers) - * [Accept or reject an offer](#accept-or-reject-an-offer) - * [Undo a previously accepted offer](#undo-a-previously-accepted-offer) - * [Dismiss an offer](#dismiss-an-offer) - * [Consents](#consents) - * [Get list of consents](#get-list-of-consents) - * [Set or update consents](#set-or-update-consents) - * [Analytics](#analytics) - * [Report an analytics event](#report-an-analytics-event) - * [Appendix](#appendix) - - - - - -## Introduction - -The API described is based on a simplified data model, suitable for handling the "100 users" test case. 
The -model and the corresponding API will have to be reworked in order to handle 100+ users. - -Furthermore: - - - The API is a REST API. - - Assumes that some OAuth2 or similar based service is used for authentication. - - All client interactions goes through the backend, including handling of authentication, payment etc. - - Subscriptions as such has already been activated through the CRM system including registration of email - address etc. - -The API is developed partly through this document. Partly through the swagger specification of the -prime/infra/prod/prime-api.yaml file that is more or less reliably mirrored in the swagger-generated static website [swagger doc](https://ostelco.github.io/). - -## Data model - -Figure describing the data model: - -![Data model](images/user-subscription-data-model.svg) - -The model assumes that: - - 1. A user has only one subscription, which is then associated with only one handset/SIM. - 2. Offers are given to this subscription. - -This is a simplified model. In reality a user might have multiple subscriptions and one subscription might be -"managed" by an user different from the user using the subscription, etc. - -For cases where a subscription (user) has been given multiple offers and accepted them, the offers are "consumed" -in sequence. The ordering can typically be by the offers "expire" date. That is the offer that expires first, -is used up first and then the next etc. - -## Common for all API methods -### Preferred language - - 1. The preferred language is indicated in every request from client. - 2. The client can indicate request for one or more languages. - 3. Text strings in responses to the client should be in all languages indicated in the request. This - will allow the client to switch between languages without connecting to backend. - 4. If a language that is not supported is indicated in a request, it should be ignored. - 5. Text strings in the default language should always be included in a response. 
- -#### Language indication sent from client - - 1. As a query parameter in the URI. - -``` - /long/url/to/somewhere?lang=no,en -``` - - 2. Using the "Accept-Language" HTTP header. - -``` - Accept-Language: no, en-gb;q=0.8, en;q=0.7 - Accept-Language: * -``` - - 3. With no `lang` query parameter in URI or `Accept-Language` HTTP header the default language should be used. - 4. If none of the requested languages are supported, fall back to the default language. - 5. The `lang` query parameter values has priority over the `Accept-Language` HTTP header. - -#### Format multi-language text strings sent in responses to client - - "message": [{ - "lang": "en", // ISO 639-1 - "text": "an error" - }, - { - "lang": "no", - "text": "en feil" - }] - -The `en` (english) language is the default language and is always included. Sections for additional languages -are added according to the language specification included in the request if available. - -### HTTP status codes and error reporting - -The API uses the following HTTP status codes. - -code | meaning ------|-------------------- - 200 | OK - 201 | Created - 400 | Bad Request - 401 | Unauthorized - 403 | Forbidden - 404 | Not Found - 500 | Internal Server Error - 503 | Service Unavailable - -In addition a service specific error code in included in the document describing the error in the error response. - -#### If a request gives no error - - -> - <- 200 OK - <- 201 Created - -#### If a request results in an error (Bad Request or Forbidden) - - -> GET /somewhere/out/there - <- 400 Bad Request - { - "code": "", // Service specific error code - "error_id": "", // An id tracing the API call (for lookup in logs) - "message": [{ - "lang": "en", // ISO 639-1 - "text": "an error" - }, - { - "lang": "no", - "text": "en feil" - }] - } - -For cases where the request contains a list of updates and one or more of them are incorrect, a list -with error messages are returned. 
Each error message in the list then includes an index value pointing -to the request that caused an error. - - -> POST /somewhere/out/there - { ... - } - <- 400 Bad Request - [{ - "index": , // Index of the list element that caused the error - "code": "", // Service specific error code - "error_id": "", // An id tracing the API call (for lookup in logs) - "message": [{ - "lang": "en", // ISO 639-1 - "text": "an error" - }, - { - "lang": "no", - "text": "en feil" - }] - }, - { - "index": , // Index of the list element that caused the error - "code": "", // Service specific error code - "error_id": "", // id tracing the API call (for lookup in logs) - "message": [ ... - }] - }] - -Note that an error text is provided with each error report. This will make it possible to report different -types of error messages depending on the type of error. - -Entries that are not referenced to in the error response with an index value, have been processed without -error. - -#### Unknown path (Not Found) - - -> GET /somewhere/out/there - <- 404 Not Found - { - "code": "", // Service specific error code - "error_id": "", // An id tracing the API call (for lookup in logs) - "message": [{ - "lang": "en", // ISO 639-1 - "text": "Can not find requested address" - }, - { - "lang": "no", - "text": "Kan ikke finne adressen" - }] - } - -#### Server side errors - -On errors in the backend. - - -> GET /somewhere/out/there - <- 500 Unknown Error - { - "error_id": "", // An id tracing the API call (for lookup in logs) - "message": [{ - "lang": "en", // ISO 639-1 - "text": "Something went wrong" - }, - { - "lang": "no", - "text": "Noe gikk galt" - }] - } - -When the service is down for maintenance or similar. 
- - -> GET /somewhere/out/there - <- 503 Service Unavailable - { - "error_id": "", // An id tracing the API call (for lookup in logs) - "message": [{ - "lang": "en", // ISO 639-1 - "text": "Service is down, try again later" - }, - { - "lang": "no", - "text": "Tjenesten er nede, prøv igjen senere" - }] - } - -## The API - -### Sign up and authentication - -![Sign-in flow sequence diagram](diagrams/signin-flow.svg) - -The client will initiate the login by contacting the [Auth0](http://auth0.com) service that helps us interface with -identity providers. What's returned by auth0 is a combination of an ID token and an access token. -The ID token can contain many types of information, including email, address etc. The access token -is simply used to authenticate the user. We use only the access token from Auth0 and ignore the ID token. - -The authentication token a [JSON web token](https://jwt.io/introduction/), that is used as -a [http bearer token](https://developer.mozilla.org/en-US/docs/Web/HTTP/Authentication) when authenticating the -client towards the Prime component. - -The client then asks for a user profile. The request is sent to a cloud endpoint, that will use the AWT token to -authenticate the user. The web endpoint terminates the https connection and sends it on, with a header -that indicates that it has been authenticated, to the Prime component. The authentication process will include -a conversation between the cloud endpoint and the identity provider. That conversation is not described -in this document. - -The user profile is then displayed. If it is empty, as it will be the first time, then the client will ask for information to be filled in and -uploaded. Populating the user profile is part of the _sign up_ procedure. During sign up the user will be asked about -name and email address. The email address will be previously registered as part of the subscription activation. 
- Email address (previously registered as part of the subscription activation) - -An email with a verification code is then sent to the registered email address. The verification code is then -entered into the client and the sign up procedure has been completed. - -#### Register personal information - - -> POST /register - { - "name": "", - "email": "" - } - <- 201 Created - -Provided that the given email address is know, an email with a verification code is sent to the registered -email address. - -On unknown email address a HTTP `403` status code is returned. - - -> POST /register - { - "name": "", - "email": "me@illegal-address.com" - } - <- 403 Forbidden - { - "code": "", // Service specific error code - "error_id": "", // An id tracing the API call (for lookup in logs) - "message": [{ - "lang": "en", // ISO 639-1 - "text": "Unknown email address" - }, - { - "lang": "no", - "text": "Ukjent email addresse" - }] - } - -#### Authenticate using verification code - -The verification code, sent to the subscriber by email as part of the [sign up](#sign-up) process, is used -to authenticate with the service. - - -> POST /auth/token - { - "grant_type": "authorization_code", - "code": "", - "email": "" - } - <- 201 Created - { - "token_type": "bearer", - "access_token": "", - "refresh_token": "", - "expires_in": - } - -The response is an OAuth2 Bearer token, including a _refresh_ token. The client should refresh the _access_ -token when it has expired with the _refresh_ token at the `/register` endpoint. - -Note! On successful return of an OAuth2 token, the client should first obtain the _subscriber-id_ using the -`/profile` endpoint - see the [Fetch profile](#fetch-profile) section. - -On an unknown verification code a HTTP `403` status code is returned. 
- - -> POST /auth/token - { - "verification-code": "", - "email": "" - } - <- 403 Forbidden - { - "code": "", // Service specific error code - "error_id": "", // An id tracing the API call (for lookup in logs) - "message": [{ - "lang": "en", // ISO 639-1 - "text": "Unknown code" - }, - { - "lang": "no", - "text": "Ukjent kode" - }] - } - -#### Refreshing the access token - - -> POST /auth/token - { - "grant_type": "refresh_token", - "refresh_token": "" - } - <- 201 Created - { - "token_type": "bearer", - "access_token": "", - "expires_in": - } - -The response is an OAuth2 Bearer token, but without the _refresh_ token. - -HTTP `403` is returned on unknown refresh token. - - -> POST /auth/token - { - "grant_type": "refresh_token", - "refresh_token": "" - } - <- 403 Forbidden - { - "code": "", // Service specific error code - "error_id": "", // An id tracing the API call (for lookup in logs) - "message": [{ - "lang": "en", // ISO 639-1 - "text": "Unknown token" - }, - { - "lang": "no", - "text": "Ukjent token" - }] - } - -### Sign in - -If the JWT _access token_ has expired, then the client should try to [refresh](#refreshing-the-access-token) -the token using the _expire token_. If this fails then a new [sign up](#sign-up) should be done. - -### User profile -#### Fetch profile - -Fetch profile content. - - -> GET /profile - <- 200 OK - { - "name": ", - "email": "", - "subscription_id": "" - } - -Note that the returned profile contains the `subscription-id`. - -On error a HTTP `404` status code is returned. - - <- 404 Not Found - { - "code": "", // Service specific error code - "error_id": "", // An id tracing the API call (for lookup in logs) - "message": [{ - "lang": "en", // ISO 639-1 - "text": "Can't find profile" - }, - { - "lang": "no", - "text": "Fant ingen profil" - }] - } - -#### Update profile - -One or more fields in the profile can be updated at once. The exception is the `subscription-id` which -cannot be updated. 
- - -> PUT /profile - { - "name": "new name" - } - <- 200 OK (no body) - -On error a HTTP `400` status code is returned. - - <- 400 Bad Request - { - "code": "", // Service specific error code - "error_id": "", // An id tracing the API call (for lookup in logs) - "message": [{ - "lang": "en", // ISO 639-1 - "text": "Illegal field value" - }, - { - "lang": "no", - "text": "Feil i oppgitt verdi" - }] - } - -### Subscriptions - - 1. A user can (with the simplified data model) only have one or no _subscription_. - 2. A _subscription_ is identified with an `subscription-id`. - 3. The `subscription-id` is included in requests. - 4. If a user has a _subscription_ the user also has a handset (where the client is running). - 5. An offer might be to one _subscription_ (a particular user) or to multiple subscriptions (more than one user). - -#### Get info for one subscription - - -> GET /subscription/status - <- 200 OK - { - "id": "", - "remaining": , // Remaining data quota in KB (long) - "accepted_offers": [{ - "offer_id": "SKU_1", - "value": , // In KB - "usage": , // In KB - "expires": // ms since epoch - }, - { - "offer_id": "SKU_3", - "value": , - "usage": , - "expires": - }] - } - -Notes: - - - The `value` field could be qualified with `dimension` field with the values KB, MB or GB. But it seems to be easier to just - assume that KB is the unit and let the client do the conversion as needed. (The `value` should then be a `long`). Btw., to - convert maybe just divide by 1000, not 1024. - -#### Get info for all subscriptions - - -> GET /subscriptions - <- 200 OK - [{ - "id": "", - ... - }, - { - ... - }] - -#### Create and delete subscriptions - -Creation and deletion of subscriptions is handled by the CRM system (handled by customer service). 
- -### Offers -#### Get list of new offers - - -> GET /offers/ - <- 200 OK - [{ - "id": "", - "label": "A big offer", // Name of the offer - "price": 99.99, // Two-digit float - "value": 100, // How much the offer tops up in KB (long) - "expires": // ms since epoch - }, - { ... - }] - -Notes: - - - The same list will be returned the next time, unless one or more offers has expired or been retracted or new ones has been added. - - If an offer has been accepted then this offer will not reappear in the list the next time (is now an "accepted offer"). - - It is up to the client to ensure that previously seen and rejected offers does not reappear. This will allow the client to display previously rejected offers again. - -#### Accept or reject an offer - -Accepting one offer. - - -> PUT /offers/?accepted=[true,false] - <- 200 OK (no body) - -On error a HTTP `403` status code is returned. - - <- 403 Forbidden - { - "code": "", // Service specific error code - "error_id": "", // An id tracing the API call (for lookup in logs) - "message": [{ - "lang": "en", // ISO 639-1 - "text": "Unknown offer, or expired offer" - }, - { - "lang": "no", - "text": "Ukjent tilbud, eller tilbudet har utløpt" - }] - } - -#### Undo a previously accepted offer - -A previously accepted offer can be undone (reverted) if: - - 1. It is done within a certain time limit, f.ex. within 10 min. This should also be possible even if the the offer has started to "run". - 2. The offer has not started to "run" yet. - -``` - -> PUT /offers/?accepted=false - <- 200 OK (no body) -``` - -On error a HTTP `403` status code is returned. 
- - <- 403 Forbidden - { - "code": "", // Service specific error code - "error_id": "", // An id tracing the API call (for lookup in logs) - "message": [{ - "lang": "en", // ISO 639-1 - "text": "Offer already taken into use, contact customer service" - }, - { - "lang": "no", - "text": "Tilbudet er allerede tatt i bruk, kontakt kundeservice" - }] - } - -#### Dismiss an offer - -If the user is not interested in an offer, it can be removed from the list of outstanding offers. - -This event is reported back as an [analytics](#analytics) event. - -### Consents - -Before collecting analytics, the user should be asked whether it this is OK or not. The "consent request" should -contain information about what data that is collected and why. - -A consent is valid for all "subscriptions" (devices) registered on a user. - -#### Get list of consents - - -> GET /consents - <- 200 OK - [{ - "id": "", - "description": "", - "accepted": - }, - { - ... - }] - -#### Set or update consents - - -> PUT /consents/?accepted=[true|false] - <- 200 OK (no body) - -On error a HTTP `403` status code is returned. - - <- 403 Forbidden - { - "code": "", // Service specific error code - "error_id": "", // An id tracing the API call (for lookup in logs) - "message": [{ - "lang": "en", // ISO 639-1 - "text": "Unknown consent" - }, - { - "lang": "no", - "text": "Ukjent avtale" - }] - } - -### Analytics - -Sending of analytics events depends on the [consents](#consents) set. - -Analytics events will normally be implicitly reported as part of normal client/backend interaction flow. F.ex. when an offer -is [accepted](#accept an offer). - -In cases where an event is not part of the normal client/backend interaction flow, an explicit analytics event will be sent. F.ex. if -an offer is [dismissed](#dismiss an offer). - -In both cases reporting of the event for analytics purposes is subject to the consents given by the user. 
- -#### Report an analytics event - -The API for reporting events that are not implicitly given by the normal client/backend interaction flow, uses an `event-type` field -to specify which event that is reported. Only the `event-type` is common to such reports, the remaining content is dependent upon -the type of event reported. - - -> POST /analytics/ - [{ - "event-type": "DELETES_AN_OFFER", - "offer-id": <"offer-id>" - }, - { - "event": "....", - - }] - <- 201 Created (no body) - -The analytics `event-type` determines what kind of information that is provided with the report. - -event-type | parameters ------------|------------- -DELETES_AN_OFFER | `offer-id` -FETCHES_OFFER_LIST | (none) - -## Appendix - -TBD. +Placeholder for client API documentation. diff --git a/client-api/diagrams/signin-flow.svg b/client-api/diagrams/signin-flow.svg deleted file mode 100644 index ef0768119..000000000 --- a/client-api/diagrams/signin-flow.svg +++ /dev/null @@ -1,22 +0,0 @@ -ClientClientAuth0Auth0CloudEPCloudEPPrimePrimeAuthTokens (ID (with email), Access)GET /profile (https, with access token)GET /profile (http, with access token)User profile (email, address, etc.) 
\ No newline at end of file diff --git a/client-api/generate-diagrams.sh b/client-api/generate-diagrams.sh deleted file mode 100755 index f3fae4224..000000000 --- a/client-api/generate-diagrams.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash - -plantuml -tsvg -pipe < puml/signin-flow.puml > diagrams/signin-flow.svg diff --git a/client-api/images/user-subscription-data-model.svg b/client-api/images/user-subscription-data-model.svg deleted file mode 100644 index 3faba7c8e..000000000 --- a/client-api/images/user-subscription-data-model.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/client-api/puml/signin-flow.puml b/client-api/puml/signin-flow.puml deleted file mode 100644 index 9fe4e0c8a..000000000 --- a/client-api/puml/signin-flow.puml +++ /dev/null @@ -1,9 +0,0 @@ -@startuml - -Client -> Auth0 : Auth -Auth0 -> Client: Tokens (ID (with email), Access) -Client -> CloudEP: GET /profile (https, with access token) -CloudEP -> Prime: GET /profile (http, with access token) -Prime -> Client: User profile (email, address, etc.) 
- -@enduml From 96700c4a164bbfa37f50f633e1234030d9dd4910 Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Thu, 11 Oct 2018 14:37:07 +0200 Subject: [PATCH 87/93] Added free product for internal users --- .../src/main/resources/init.cypher | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/tools/neo4j-admin-tools/src/main/resources/init.cypher b/tools/neo4j-admin-tools/src/main/resources/init.cypher index 545f186fe..605f88fe0 100644 --- a/tools/neo4j-admin-tools/src/main/resources/init.cypher +++ b/tools/neo4j-admin-tools/src/main/resources/init.cypher @@ -1,6 +1,15 @@ // For country:NO -CREATE (:Product {`id`: '1GB_249NOK', +CREATE (:Product {`id`: '1GB_0NOK', `presentation/isDefault`: 'true', + `presentation/offerLabel`: '', + `presentation/priceLabel`: 'Free', + `presentation/productLabel`: '+1GB', + `price/amount`: '0', + `price/currency`: '', + `properties/noOfBytes`: '1_000_000_000', + `sku`: '1GB_0NOK'}); + +CREATE (:Product {`id`: '1GB_249NOK', `presentation/offerLabel`: 'Default Offer', `presentation/priceLabel`: '249 NOK', `presentation/productLabel`: '+1GB', @@ -40,6 +49,11 @@ CREATE (:Segment {`id`: 'country-no'}); CREATE (:Offer {`id`: 'default_offer-no'}); +MATCH (n:Offer {id: 'default_offer-no'}) +WITH n +MATCH (m:Product {id: '1GB_0NOK'}) +CREATE (n)-[:OFFER_HAS_PRODUCT]->(m); + MATCH (n:Offer {id: 'default_offer-no'}) WITH n MATCH (m:Product {id: '1GB_249NOK'}) From 09a7eb6212932e498bb8c5ee5317fb51114ea355 Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Thu, 11 Oct 2018 16:49:49 +0200 Subject: [PATCH 88/93] Moved common jersey code from admin-api and client-api to separate module --- admin-api/build.gradle | 3 ++- .../ostelco/prime/admin/api/AdminModule.kt | 2 -- .../ostelco/prime/admin/api/ImportResource.kt | 2 +- .../ostelco/importer/ImporterResourceTest.kt | 2 +- .../prime/client/api/ClientApiModule.kt | 4 ---- jersey/build.gradle | 8 +++++++ .../org/ostelco/prime/jersey/JerseyModule.kt | 17 ++++++++++++++ 
.../jersey}/TrackRequestsLoggingFilter.kt | 23 ++++++++++--------- .../prime/jersey/YamlMessageBodyReader.kt | 22 +----------------- .../io.dropwizard.jackson.Discoverable | 1 + .../org.ostelco.prime.module.PrimeModule | 1 + prime/build.gradle | 1 + prime/config/config.yaml | 1 + prime/config/test.yaml | 1 + .../integration-tests/resources/config.yaml | 1 + settings.gradle | 2 ++ 16 files changed, 50 insertions(+), 41 deletions(-) create mode 100644 jersey/build.gradle create mode 100644 jersey/src/main/kotlin/org/ostelco/prime/jersey/JerseyModule.kt rename {prime-modules/src/main/kotlin/org/ostelco/prime/logging => jersey/src/main/kotlin/org/ostelco/prime/jersey}/TrackRequestsLoggingFilter.kt (63%) rename admin-api/src/main/kotlin/org/ostelco/prime/admin/Jersey.kt => jersey/src/main/kotlin/org/ostelco/prime/jersey/YamlMessageBodyReader.kt (69%) create mode 100644 jersey/src/main/resources/META-INF/services/io.dropwizard.jackson.Discoverable create mode 100644 jersey/src/main/resources/META-INF/services/org.ostelco.prime.module.PrimeModule diff --git a/admin-api/build.gradle b/admin-api/build.gradle index 33ebfeb53..96f9fe6bd 100644 --- a/admin-api/build.gradle +++ b/admin-api/build.gradle @@ -8,7 +8,8 @@ dependencies { implementation "javax.xml.bind:jaxb-api:$jaxbVersion" implementation "javax.activation:activation:$javaxActivationVersion" - + + testImplementation project(":jersey") testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" } diff --git a/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/AdminModule.kt b/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/AdminModule.kt index 8e5983f99..f8a72b1f3 100644 --- a/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/AdminModule.kt +++ b/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/AdminModule.kt @@ -2,7 +2,6 @@ package org.ostelco.prime.admin.api import com.fasterxml.jackson.annotation.JsonTypeName import io.dropwizard.setup.Environment -import 
org.ostelco.prime.admin.YamlMessageBodyReader import org.ostelco.prime.admin.importer.ImportAdapter import org.ostelco.prime.module.PrimeModule @@ -16,7 +15,6 @@ class AdminModule : PrimeModule { jerseySever.register(SegmentResource()) jerseySever.register(ProductResource()) jerseySever.register(ProductClassResource()) - jerseySever.register(YamlMessageBodyReader::class.java) jerseySever.register(ImporterResource(ImportAdapter())) } } diff --git a/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImportResource.kt b/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImportResource.kt index 607fd44c5..f3dfccdc9 100644 --- a/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImportResource.kt +++ b/admin-api/src/main/kotlin/org/ostelco/prime/admin/api/ImportResource.kt @@ -1,6 +1,5 @@ package org.ostelco.prime.admin.api -import org.ostelco.prime.admin.asJson import org.ostelco.prime.admin.importer.AddToSegments import org.ostelco.prime.admin.importer.ChangeSegments import org.ostelco.prime.admin.importer.CreateOffer @@ -9,6 +8,7 @@ import org.ostelco.prime.admin.importer.ImportProcessor import org.ostelco.prime.admin.importer.RemoveFromSegments import org.ostelco.prime.admin.importer.UpdateSegments import org.ostelco.prime.getLogger +import org.ostelco.prime.jsonmapper.asJson import javax.ws.rs.Consumes import javax.ws.rs.DELETE import javax.ws.rs.POST diff --git a/admin-api/src/test/kotlin/org/ostelco/importer/ImporterResourceTest.kt b/admin-api/src/test/kotlin/org/ostelco/importer/ImporterResourceTest.kt index d3940b79c..550f9041d 100644 --- a/admin-api/src/test/kotlin/org/ostelco/importer/ImporterResourceTest.kt +++ b/admin-api/src/test/kotlin/org/ostelco/importer/ImporterResourceTest.kt @@ -6,7 +6,6 @@ import io.dropwizard.testing.junit.ResourceTestRule import org.junit.Assert.assertEquals import org.junit.ClassRule import org.junit.Test -import org.ostelco.prime.admin.YamlMessageBodyReader import org.ostelco.prime.admin.api.ImporterResource import 
org.ostelco.prime.admin.importer.AddToSegments import org.ostelco.prime.admin.importer.ChangeSegments @@ -17,6 +16,7 @@ import org.ostelco.prime.admin.importer.Offer import org.ostelco.prime.admin.importer.RemoveFromSegments import org.ostelco.prime.admin.importer.UpdateSegments import org.ostelco.prime.apierror.ApiError +import org.ostelco.prime.jersey.YamlMessageBodyReader import org.ostelco.prime.model.Price import javax.ws.rs.client.Entity import javax.ws.rs.core.Response.Status diff --git a/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt b/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt index 228a0a413..52fdad641 100644 --- a/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt +++ b/client-api/src/main/kotlin/org/ostelco/prime/client/api/ClientApiModule.kt @@ -13,7 +13,6 @@ import io.dropwizard.setup.Environment import org.eclipse.jetty.servlets.CrossOriginFilter import org.ostelco.prime.client.api.auth.AccessTokenPrincipal import org.ostelco.prime.client.api.auth.OAuthAuthenticator -import org.ostelco.prime.logging.TrackRequestsLoggingFilter import org.ostelco.prime.client.api.metrics.reportMetricsAtStartUp import org.ostelco.prime.client.api.resources.AnalyticsResource import org.ostelco.prime.client.api.resources.ApplicationTokenResource @@ -70,9 +69,6 @@ class ClientApiModule : PrimeModule { .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)) .build(env.name) - /* Add filters/interceptors. */ - jerseyEnv.register(TrackRequestsLoggingFilter()) - /* APIs. 
*/ jerseyEnv.register(AnalyticsResource(dao)) jerseyEnv.register(ConsentsResource(dao)) diff --git a/jersey/build.gradle b/jersey/build.gradle new file mode 100644 index 000000000..bf35cb3f9 --- /dev/null +++ b/jersey/build.gradle @@ -0,0 +1,8 @@ +plugins { + id "org.jetbrains.kotlin.jvm" version "1.2.71" + id "java-library" +} + +dependencies { + implementation project(":prime-modules") +} \ No newline at end of file diff --git a/jersey/src/main/kotlin/org/ostelco/prime/jersey/JerseyModule.kt b/jersey/src/main/kotlin/org/ostelco/prime/jersey/JerseyModule.kt new file mode 100644 index 000000000..23845922d --- /dev/null +++ b/jersey/src/main/kotlin/org/ostelco/prime/jersey/JerseyModule.kt @@ -0,0 +1,17 @@ +package org.ostelco.prime.jersey + +import com.fasterxml.jackson.annotation.JsonTypeName +import io.dropwizard.setup.Environment +import org.ostelco.prime.module.PrimeModule + +@JsonTypeName("jersey") +class JerseyModule : PrimeModule { + + override fun init(env: Environment) { + + env.jersey().register(YamlMessageBodyReader::class.java) + + /* Add filters/interceptors. 
*/ + env.jersey().register(TrackRequestsLoggingFilter()) + } +} \ No newline at end of file diff --git a/prime-modules/src/main/kotlin/org/ostelco/prime/logging/TrackRequestsLoggingFilter.kt b/jersey/src/main/kotlin/org/ostelco/prime/jersey/TrackRequestsLoggingFilter.kt similarity index 63% rename from prime-modules/src/main/kotlin/org/ostelco/prime/logging/TrackRequestsLoggingFilter.kt rename to jersey/src/main/kotlin/org/ostelco/prime/jersey/TrackRequestsLoggingFilter.kt index ac96a3b3b..a43d0ee96 100644 --- a/prime-modules/src/main/kotlin/org/ostelco/prime/logging/TrackRequestsLoggingFilter.kt +++ b/jersey/src/main/kotlin/org/ostelco/prime/jersey/TrackRequestsLoggingFilter.kt @@ -1,11 +1,11 @@ -package org.ostelco.prime.logging +package org.ostelco.prime.jersey +import org.slf4j.MDC +import java.util.* import javax.ws.rs.container.ContainerRequestContext import javax.ws.rs.container.ContainerRequestFilter import javax.ws.rs.container.ContainerResponseContext import javax.ws.rs.container.ContainerResponseFilter -import org.slf4j.MDC -import java.util.UUID import javax.ws.rs.ext.Provider /** @@ -15,20 +15,21 @@ import javax.ws.rs.ext.Provider class TrackRequestsLoggingFilter : ContainerRequestFilter, ContainerResponseFilter { /* Commonly used HTTP header for tracing requests. */ - val REQUEST_TRACE_HEADER = "X-RequestTrace" + private val requestTraceHeader = "X-Request-ID" /* MDC tracking. 
*/ - val TRACE_ID = "TraceId" + private val traceId = "TraceId" override fun filter(ctx: ContainerRequestContext) { - val traceHeader = ctx.getHeaderString(REQUEST_TRACE_HEADER) - MDC.put("InvocationId", if (!traceHeader.isNullOrBlank()) - traceHeader - else - UUID.randomUUID().toString()) + val traceHeader = ctx.getHeaderString(requestTraceHeader) + MDC.put(traceId, + if (!traceHeader.isNullOrBlank()) + traceHeader + else + UUID.randomUUID().toString()) } override fun filter(reqCtx: ContainerRequestContext, rspCtx: ContainerResponseContext) { - MDC.remove(TRACE_ID) + MDC.remove(traceId) } } diff --git a/admin-api/src/main/kotlin/org/ostelco/prime/admin/Jersey.kt b/jersey/src/main/kotlin/org/ostelco/prime/jersey/YamlMessageBodyReader.kt similarity index 69% rename from admin-api/src/main/kotlin/org/ostelco/prime/admin/Jersey.kt rename to jersey/src/main/kotlin/org/ostelco/prime/jersey/YamlMessageBodyReader.kt index 8d829b154..b36b0d0ea 100644 --- a/admin-api/src/main/kotlin/org/ostelco/prime/admin/Jersey.kt +++ b/jersey/src/main/kotlin/org/ostelco/prime/jersey/YamlMessageBodyReader.kt @@ -1,9 +1,7 @@ -package org.ostelco.prime.admin +package org.ostelco.prime.jersey -import com.fasterxml.jackson.core.JsonProcessingException import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.dataformat.yaml.YAMLFactory -import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import com.fasterxml.jackson.module.kotlin.registerKotlinModule import org.ostelco.prime.getLogger import java.io.InputStream @@ -15,8 +13,6 @@ import javax.ws.rs.core.MultivaluedMap import javax.ws.rs.core.Response.Status.BAD_REQUEST import javax.ws.rs.ext.MessageBodyReader -/// XXX This is a very generic message body reader, should -// be available anywhere we read yaml files. 
@Consumes("text/vnd.yaml") class YamlMessageBodyReader : MessageBodyReader { @@ -43,20 +39,4 @@ class YamlMessageBodyReader : MessageBodyReader { throw WebApplicationException(e.message, BAD_REQUEST.statusCode) } } -} - -/** - * Common 'helper' functions for resources. - * - */ -val objectMapper = jacksonObjectMapper() - -fun R.asJson(`object`: Any): String { - try { - return objectMapper.writeValueAsString(`object`) - } catch (e: JsonProcessingException) { - val logger by getLogger() - logger.error("Error in json response {}", e) - } - return "" } \ No newline at end of file diff --git a/jersey/src/main/resources/META-INF/services/io.dropwizard.jackson.Discoverable b/jersey/src/main/resources/META-INF/services/io.dropwizard.jackson.Discoverable new file mode 100644 index 000000000..8056fe23b --- /dev/null +++ b/jersey/src/main/resources/META-INF/services/io.dropwizard.jackson.Discoverable @@ -0,0 +1 @@ +org.ostelco.prime.module.PrimeModule \ No newline at end of file diff --git a/jersey/src/main/resources/META-INF/services/org.ostelco.prime.module.PrimeModule b/jersey/src/main/resources/META-INF/services/org.ostelco.prime.module.PrimeModule new file mode 100644 index 000000000..ab2aa5826 --- /dev/null +++ b/jersey/src/main/resources/META-INF/services/org.ostelco.prime.module.PrimeModule @@ -0,0 +1 @@ +org.ostelco.prime.jersey.JerseyModule \ No newline at end of file diff --git a/prime/build.gradle b/prime/build.gradle index 1a602b479..8c6010d40 100644 --- a/prime/build.gradle +++ b/prime/build.gradle @@ -41,6 +41,7 @@ dependencies { runtimeOnly project(':payment-processor') runtimeOnly project(':analytics-module') runtimeOnly project(':slack') + runtimeOnly project(':jersey') implementation "io.dropwizard:dropwizard-http2:$dropwizardVersion" runtimeOnly "io.dropwizard:dropwizard-json-logging:$dropwizardVersion" diff --git a/prime/config/config.yaml b/prime/config/config.yaml index d26cd9028..d10c9a973 100644 --- a/prime/config/config.yaml +++ 
b/prime/config/config.yaml @@ -1,4 +1,5 @@ modules: +- type: jersey - type: slack config: notifications: diff --git a/prime/config/test.yaml b/prime/config/test.yaml index 31e7c406d..4dce2146c 100644 --- a/prime/config/test.yaml +++ b/prime/config/test.yaml @@ -1,6 +1,7 @@ # This config is used as config.yaml when prime is running using docker-compose for Acceptance Testing modules: +- type: jersey - type: firebase config: configFile: /secret/pantel-prod.json diff --git a/prime/src/integration-tests/resources/config.yaml b/prime/src/integration-tests/resources/config.yaml index cd659ed6d..fb42b7026 100644 --- a/prime/src/integration-tests/resources/config.yaml +++ b/prime/src/integration-tests/resources/config.yaml @@ -1,4 +1,5 @@ modules: +- type: jersey - type: firebase config: configFile: config/pantel-prod.json diff --git a/settings.gradle b/settings.gradle index fbd8d4601..01df26180 100644 --- a/settings.gradle +++ b/settings.gradle @@ -14,6 +14,7 @@ include ':diameter-test' include ':ext-auth-provider' include ':firebase-store' include ':firebase-extensions' +include ':jersey' include ':model' include ':neo4j-admin-tools' include ':neo4j-store' @@ -43,6 +44,7 @@ project(':diameter-test').projectDir = "$rootDir/diameter-test" as File project(':ext-auth-provider').projectDir = "$rootDir/ext-auth-provider" as File project(':firebase-store').projectDir = "$rootDir/firebase-store" as File project(':firebase-extensions').projectDir = "$rootDir/firebase-extensions" as File +project(':jersey').projectDir = "$rootDir/jersey" as File project(':model').projectDir = "$rootDir/model" as File project(':neo4j-admin-tools').projectDir = "$rootDir/tools/neo4j-admin-tools" as File project(':neo4j-store').projectDir = "$rootDir/neo4j-store" as File From 15cf91a704abad9e01e461ed846dea80003cd33f Mon Sep 17 00:00:00 2001 From: mpeterss Date: Fri, 12 Oct 2018 10:17:12 +0200 Subject: [PATCH 89/93] Update prime configuration for imei download --- 
.../kotlin/org/ostelco/prime/imei/ImeiLookup.kt | 2 +- prime/config/config.yaml | 2 +- prime/infra/dev/prime.yaml | 16 +++++++++++----- prime/infra/prod/prime.yaml | 11 +++++++++++ 4 files changed, 24 insertions(+), 7 deletions(-) diff --git a/prime-modules/src/main/kotlin/org/ostelco/prime/imei/ImeiLookup.kt b/prime-modules/src/main/kotlin/org/ostelco/prime/imei/ImeiLookup.kt index 4414e7afa..f06fc766f 100644 --- a/prime-modules/src/main/kotlin/org/ostelco/prime/imei/ImeiLookup.kt +++ b/prime-modules/src/main/kotlin/org/ostelco/prime/imei/ImeiLookup.kt @@ -5,5 +5,5 @@ import org.ostelco.prime.imei.core.Imei import org.ostelco.prime.imei.core.ImeiLookupError interface ImeiLookup { - fun getImeiInformation(imeisv: String) : Either + fun getImeiInformation(imei: String) : Either } diff --git a/prime/config/config.yaml b/prime/config/config.yaml index b60c0077c..cf9e29c9b 100644 --- a/prime/config/config.yaml +++ b/prime/config/config.yaml @@ -9,7 +9,7 @@ modules: connectionRequestTimeout: 1s - type: Imei-lookup config: - csvFile: secret/imeiDb.txt + csvFile: /config-data/imeiDb.csv - type: firebase config: configFile: /secret/pantel-prod.json diff --git a/prime/infra/dev/prime.yaml b/prime/infra/dev/prime.yaml index 4a7b4e2d6..7673d030c 100644 --- a/prime/infra/dev/prime.yaml +++ b/prime/infra/dev/prime.yaml @@ -89,6 +89,13 @@ spec: prometheus.io/path: '/prometheus-metrics' prometheus.io/port: '8081' spec: + initContainers: + - name: "init-downloader" + image: "google/cloud-sdk:latest" + command: ['sh', '-c', 'gsutil cp gs://prime-files/dev/*.* /config-data/'] + volumeMounts: + - name: config-data + mountPath: /config-data/ containers: - name: ocs-esp image: gcr.io/endpoints-release/endpoints-runtime:1 @@ -173,8 +180,8 @@ spec: - name: secret-config mountPath: "/secret" readOnly: true - - name: imei-db - mountPath: "/imei" + - name: config-data + mountPath: "/config-data" readOnly: true ports: - containerPort: 8080 @@ -185,9 +192,6 @@ spec: - name: secret-config 
secret: secretName: pantel-prod.json - - type: Imei-lookup - config: - csvFile: /imei/imeiDb.csv - name: api-ostelco-ssl secret: secretName: api-ostelco-ssl @@ -197,3 +201,5 @@ spec: - name: metrics-ostelco-ssl secret: secretName: metrics-ostelco-ssl + - name: config-data + emptyDir: {} diff --git a/prime/infra/prod/prime.yaml b/prime/infra/prod/prime.yaml index 736054a2e..070bcd225 100644 --- a/prime/infra/prod/prime.yaml +++ b/prime/infra/prod/prime.yaml @@ -89,6 +89,13 @@ spec: prometheus.io/path: '/prometheus-metrics' prometheus.io/port: '8081' spec: + initContainers: + - name: "init-downloader" + image: "google/cloud-sdk:latest" + command: ['sh', '-c', 'gsutil cp gs://prime-files/prod/*.* /config-data/'] + volumeMounts: + - name: config-data + mountPath: /config-data/ containers: - name: ocs-esp image: gcr.io/endpoints-release/endpoints-runtime:1 @@ -170,6 +177,8 @@ spec: volumeMounts: - name: secret-config mountPath: "/secret" + - name: config-data + mountPath: "/config-data" readOnly: true ports: - containerPort: 8080 @@ -189,3 +198,5 @@ spec: - name: metrics-ostelco-ssl secret: secretName: metrics-ostelco-ssl + - name: config-data + emptyDir: {} From 68cd4a0e9a8ec95ecc9117f5780521605b0511d4 Mon Sep 17 00:00:00 2001 From: mpeterss Date: Fri, 12 Oct 2018 10:46:48 +0200 Subject: [PATCH 90/93] Move ime db to correct folder in test --- prime/Dockerfile.test | 2 +- prime/config/test.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/prime/Dockerfile.test b/prime/Dockerfile.test index 33c98ff2d..79696f4f5 100644 --- a/prime/Dockerfile.test +++ b/prime/Dockerfile.test @@ -16,7 +16,7 @@ COPY script/wait.sh /wait.sh # test.yaml is copied as config.yaml for AT. 
COPY config/pantel-prod.json /secret/pantel-prod.json -COPY config/testDb.csv /imei/imeiDb.csv +COPY config/testDb.csv /config-data/imeiDb.csv COPY config/test.yaml /config/config.yaml COPY build/libs/prime-uber.jar /prime.jar diff --git a/prime/config/test.yaml b/prime/config/test.yaml index 0ddeb85ed..c6f62f0ca 100644 --- a/prime/config/test.yaml +++ b/prime/config/test.yaml @@ -7,7 +7,7 @@ modules: rootPath: test - type: Imei-lookup config: - csvFile: /imei/imeiDb.csv + csvFile: /config-data/imeiDb.csv - type: neo4j config: host: neo4j From f0e355b949556b745b86ce95e50b707e0fa5401f Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Fri, 12 Oct 2018 13:40:03 +0200 Subject: [PATCH 91/93] Updated dependencies --- analytics-module/build.gradle | 2 +- build.gradle | 6 +++--- docker-compose.override.yaml | 2 +- neo4j-store/build.gradle | 2 +- neo4j-store/src/test/resources/docker-compose.yaml | 2 +- ocs/build.gradle | 2 +- ocsgw/build.gradle | 4 ++-- prime-client-api/build.gradle | 2 +- prime/infra/dev/neo4j.yaml | 2 +- prime/infra/prod/neo4j.yaml | 2 +- prime/src/integration-tests/resources/docker-compose.yaml | 2 +- tools/neo4j-admin-tools/docker-compose.backup.yaml | 2 +- tools/neo4j-admin-tools/docker-compose.neo4j.yaml | 2 +- tools/neo4j-admin-tools/docker-compose.restore.yaml | 2 +- tools/neo4j-admin-tools/docker-compose.yaml | 2 +- .../src/main/resources/docker-compose.yaml | 2 +- 16 files changed, 19 insertions(+), 19 deletions(-) diff --git a/analytics-module/build.gradle b/analytics-module/build.gradle index 2ecf5b533..7864df2e3 100644 --- a/analytics-module/build.gradle +++ b/analytics-module/build.gradle @@ -11,7 +11,7 @@ dependencies { implementation "com.google.cloud:google-cloud-pubsub:$googleCloudVersion" implementation 'com.google.code.gson:gson:2.8.5' - testImplementation 'com.google.api:gax-grpc:1.33.0' + testImplementation 'com.google.api:gax-grpc:1.33.1' testImplementation "io.dropwizard:dropwizard-testing:$dropwizardVersion" testImplementation 
"org.mockito:mockito-core:$mockitoVersion" diff --git a/build.gradle b/build.gradle index 104ce84ab..b4763aff3 100644 --- a/build.gradle +++ b/build.gradle @@ -34,10 +34,10 @@ subprojects { ext { kotlinVersion = "1.2.71" dropwizardVersion = "1.3.7" - kotlinXCoroutinesVersion = "0.30.0" - googleCloudVersion = "1.48.0" + kotlinXCoroutinesVersion = "0.30.2" + googleCloudVersion = "1.49.0" jacksonVersion = "2.9.7" - stripeVersion = "7.0.0" + stripeVersion = "7.1.0" guavaVersion = "26.0-jre" junit5Version = "5.3.1" assertJVersion = "3.11.1" diff --git a/docker-compose.override.yaml b/docker-compose.override.yaml index f920554c9..5ca420ea3 100644 --- a/docker-compose.override.yaml +++ b/docker-compose.override.yaml @@ -100,7 +100,7 @@ services: neo4j: container_name: "neo4j" - image: neo4j:3.4.7 + image: neo4j:3.4.8 environment: - NEO4J_AUTH=none ports: diff --git a/neo4j-store/build.gradle b/neo4j-store/build.gradle index 65868d564..4d1e80163 100644 --- a/neo4j-store/build.gradle +++ b/neo4j-store/build.gradle @@ -3,7 +3,7 @@ plugins { id "java-library" } -ext.neo4jVersion="3.4.7" +ext.neo4jVersion="3.4.8" ext.neo4jDriverVersion="1.6.3" tasks.withType(org.jetbrains.kotlin.gradle.tasks.KotlinCompile).all { diff --git a/neo4j-store/src/test/resources/docker-compose.yaml b/neo4j-store/src/test/resources/docker-compose.yaml index 633d77427..76e311981 100644 --- a/neo4j-store/src/test/resources/docker-compose.yaml +++ b/neo4j-store/src/test/resources/docker-compose.yaml @@ -3,7 +3,7 @@ version: "3.3" services: neo4j: container_name: "neo4j" - image: neo4j:3.4.7 + image: neo4j:3.4.8 environment: - NEO4J_AUTH=none ports: diff --git a/ocs/build.gradle b/ocs/build.gradle index 9444e7e74..fa7caa3df 100644 --- a/ocs/build.gradle +++ b/ocs/build.gradle @@ -13,7 +13,7 @@ dependencies { implementation project(':prime-modules') implementation 'com.lmax:disruptor:3.4.2' - // implementation "org.jetbrains.kotlinx:kotlinx-coroutines-core:0.30.0" + // implementation 
"org.jetbrains.kotlinx:kotlinx-coroutines-core:0.30.2" testImplementation "org.jetbrains.kotlin:kotlin-test:$kotlinVersion" testImplementation "org.jetbrains.kotlin:kotlin-test-junit:$kotlinVersion" diff --git a/ocsgw/build.gradle b/ocsgw/build.gradle index cd9de8a6e..16d67c79a 100644 --- a/ocsgw/build.gradle +++ b/ocsgw/build.gradle @@ -1,6 +1,6 @@ plugins { id "application" - // FIXME: unable to update to 4.0.0 + // FIXME martin: unable to update to 4.0.1 id "com.github.johnrengelman.shadow" version "2.0.4" } @@ -17,7 +17,7 @@ dependencies { implementation 'ch.qos.logback:logback-classic:1.2.3' // log to gcp stack-driver - implementation 'com.google.cloud:google-cloud-logging-logback:0.66.0-alpha' + implementation 'com.google.cloud:google-cloud-logging-logback:0.67.0-alpha' testImplementation project(':diameter-test') testImplementation "org.junit.jupiter:junit-jupiter-api:$junit5Version" diff --git a/prime-client-api/build.gradle b/prime-client-api/build.gradle index 09a55d0d2..c043500ca 100644 --- a/prime-client-api/build.gradle +++ b/prime-client-api/build.gradle @@ -1,7 +1,7 @@ plugins { id "org.jetbrains.kotlin.jvm" version "1.2.71" id 'java-library' - id 'org.hidetake.swagger.generator' version '2.13.0' + id 'org.hidetake.swagger.generator' version '2.14.0' id "idea" } diff --git a/prime/infra/dev/neo4j.yaml b/prime/infra/dev/neo4j.yaml index 0ec4cc2f8..e647ca8d5 100644 --- a/prime/infra/dev/neo4j.yaml +++ b/prime/infra/dev/neo4j.yaml @@ -41,7 +41,7 @@ spec: spec: containers: - name: neo4j - image: "neo4j:3.4.7-enterprise" + image: "neo4j:3.4.8-enterprise" imagePullPolicy: "IfNotPresent" env: - name: NEO4J_dbms_mode diff --git a/prime/infra/prod/neo4j.yaml b/prime/infra/prod/neo4j.yaml index 0ec4cc2f8..e647ca8d5 100644 --- a/prime/infra/prod/neo4j.yaml +++ b/prime/infra/prod/neo4j.yaml @@ -41,7 +41,7 @@ spec: spec: containers: - name: neo4j - image: "neo4j:3.4.7-enterprise" + image: "neo4j:3.4.8-enterprise" imagePullPolicy: "IfNotPresent" env: - name: 
NEO4J_dbms_mode diff --git a/prime/src/integration-tests/resources/docker-compose.yaml b/prime/src/integration-tests/resources/docker-compose.yaml index 633d77427..76e311981 100644 --- a/prime/src/integration-tests/resources/docker-compose.yaml +++ b/prime/src/integration-tests/resources/docker-compose.yaml @@ -3,7 +3,7 @@ version: "3.3" services: neo4j: container_name: "neo4j" - image: neo4j:3.4.7 + image: neo4j:3.4.8 environment: - NEO4J_AUTH=none ports: diff --git a/tools/neo4j-admin-tools/docker-compose.backup.yaml b/tools/neo4j-admin-tools/docker-compose.backup.yaml index 2c128b62e..abe73db72 100644 --- a/tools/neo4j-admin-tools/docker-compose.backup.yaml +++ b/tools/neo4j-admin-tools/docker-compose.backup.yaml @@ -3,7 +3,7 @@ version: "3.7" services: neo4j-online-backup: container_name: neo4j-online-backup - image: neo4j:3.4.7-enterprise + image: neo4j:3.4.8-enterprise command: > bin/neo4j-admin backup --backup-dir=/backup_dir diff --git a/tools/neo4j-admin-tools/docker-compose.neo4j.yaml b/tools/neo4j-admin-tools/docker-compose.neo4j.yaml index 4258e14aa..a8a438294 100644 --- a/tools/neo4j-admin-tools/docker-compose.neo4j.yaml +++ b/tools/neo4j-admin-tools/docker-compose.neo4j.yaml @@ -3,7 +3,7 @@ version: "3.7" services: neo4j: container_name: "neo4j" - image: neo4j:3.4.7 + image: neo4j:3.4.8 environment: - NEO4J_AUTH=none ports: diff --git a/tools/neo4j-admin-tools/docker-compose.restore.yaml b/tools/neo4j-admin-tools/docker-compose.restore.yaml index 685876573..8c9256aed 100644 --- a/tools/neo4j-admin-tools/docker-compose.restore.yaml +++ b/tools/neo4j-admin-tools/docker-compose.restore.yaml @@ -3,7 +3,7 @@ version: "3.7" services: neo4j-online-restore: container_name: neo4j-online-restore - image: neo4j:3.4.7-enterprise + image: neo4j:3.4.8-enterprise command: > bin/neo4j-admin restore --from=/backup_dir/graph.db-backup diff --git a/tools/neo4j-admin-tools/docker-compose.yaml b/tools/neo4j-admin-tools/docker-compose.yaml index 8be1a6627..b73d53eed 100644 
--- a/tools/neo4j-admin-tools/docker-compose.yaml +++ b/tools/neo4j-admin-tools/docker-compose.yaml @@ -3,7 +3,7 @@ version: "3.7" services: neo4j: container_name: "neo4j" - image: neo4j:3.4.7 + image: neo4j:3.4.8 environment: - NEO4J_AUTH=none ports: diff --git a/tools/neo4j-admin-tools/src/main/resources/docker-compose.yaml b/tools/neo4j-admin-tools/src/main/resources/docker-compose.yaml index e8f1e59fa..60199e5e5 100644 --- a/tools/neo4j-admin-tools/src/main/resources/docker-compose.yaml +++ b/tools/neo4j-admin-tools/src/main/resources/docker-compose.yaml @@ -3,7 +3,7 @@ version: "3.7" services: neo4j: container_name: "neo4j" - image: neo4j:3.4.7-enterprise + image: neo4j:3.4.8-enterprise environment: - NEO4J_AUTH=none - NEO4J_ACCEPT_LICENSE_AGREEMENT=yes From 99d12efe1399d9185235f0b3e8d39a4c12add23e Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Tue, 9 Oct 2018 16:27:34 +0200 Subject: [PATCH 92/93] Removed kubctl file based secrets --- prime/infra/README.md | 5 +++-- prime/infra/dev/slack-secrets.yaml | 7 ------- prime/infra/dev/stripe-secrets.yaml | 7 ------- prime/infra/prod/slack-secrets.yaml | 7 ------- prime/infra/prod/stripe-secrets.yaml | 7 ------- 5 files changed, 3 insertions(+), 30 deletions(-) delete mode 100644 prime/infra/dev/slack-secrets.yaml delete mode 100644 prime/infra/dev/stripe-secrets.yaml delete mode 100644 prime/infra/prod/slack-secrets.yaml delete mode 100644 prime/infra/prod/stripe-secrets.yaml diff --git a/prime/infra/README.md b/prime/infra/README.md index 157132c07..643f12657 100644 --- a/prime/infra/README.md +++ b/prime/infra/README.md @@ -71,6 +71,7 @@ gcloud container builds submit \ ```bash kubectl create secret generic pantel-prod.json --from-file prime/config/pantel-prod.json +kubectl create secret generic imeiDb.csv.zip --from-file imeiDb.csv.zip ``` Reference: @@ -207,11 +208,11 @@ kubectl create secret generic pantel-prod.json --from-file prime/config/pantel-p Note: To update the secrets defined using yaml, delete and 
created them again. They are not updated. ```bash -sed -e s/STRIPE_API_KEY/$(echo -n 'keep-stripe-api-key-here' | base64)/g prime/infra/dev/stripe-secrets.yaml | kubectl apply -f - +kubectl create secret generic stripe-secrets --from-literal=stripeApiKey='keep-stripe-api-key-here' ``` ```bash -sed -e s/SLACK_WEBHOOK_URI/$(echo -n 'https://hooks.slack.com/services/.../.../...' | base64)/g prime/infra/dev/slack-secrets.yaml | kubectl apply -f - +kubectl create secret generic slack-secrets --from-literal=slackWebHookUri='https://hooks.slack.com/services/.../.../...' ``` ```bash diff --git a/prime/infra/dev/slack-secrets.yaml b/prime/infra/dev/slack-secrets.yaml deleted file mode 100644 index 025642bd1..000000000 --- a/prime/infra/dev/slack-secrets.yaml +++ /dev/null @@ -1,7 +0,0 @@ -apiVersion: v1 -kind: Secret -metadata: - name: slack-secrets -type: Opaque -data: - slackWebHookUri: SLACK_WEBHOOK_URI \ No newline at end of file diff --git a/prime/infra/dev/stripe-secrets.yaml b/prime/infra/dev/stripe-secrets.yaml deleted file mode 100644 index 5c0df1090..000000000 --- a/prime/infra/dev/stripe-secrets.yaml +++ /dev/null @@ -1,7 +0,0 @@ -apiVersion: v1 -kind: Secret -metadata: - name: stripe-secrets -type: Opaque -data: - stripeApiKey: STRIPE_API_KEY \ No newline at end of file diff --git a/prime/infra/prod/slack-secrets.yaml b/prime/infra/prod/slack-secrets.yaml deleted file mode 100644 index 025642bd1..000000000 --- a/prime/infra/prod/slack-secrets.yaml +++ /dev/null @@ -1,7 +0,0 @@ -apiVersion: v1 -kind: Secret -metadata: - name: slack-secrets -type: Opaque -data: - slackWebHookUri: SLACK_WEBHOOK_URI \ No newline at end of file diff --git a/prime/infra/prod/stripe-secrets.yaml b/prime/infra/prod/stripe-secrets.yaml deleted file mode 100644 index 5c0df1090..000000000 --- a/prime/infra/prod/stripe-secrets.yaml +++ /dev/null @@ -1,7 +0,0 @@ -apiVersion: v1 -kind: Secret -metadata: - name: stripe-secrets -type: Opaque -data: - stripeApiKey: STRIPE_API_KEY \ No newline at 
end of file From 2ab1cf940c72d1602f091e9f93b82efc2fd592a7 Mon Sep 17 00:00:00 2001 From: Vihang Patil Date: Mon, 8 Oct 2018 18:51:17 +0200 Subject: [PATCH 93/93] Added alerts to slack mesages --- .../kotlin/org/ostelco/prime/slack/Model.kt | 5 +++-- .../prime/slack/SlackNotificationReporter.kt | 18 +++++++++++------- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/slack/src/main/kotlin/org/ostelco/prime/slack/Model.kt b/slack/src/main/kotlin/org/ostelco/prime/slack/Model.kt index 1a25edc5e..11979eef0 100644 --- a/slack/src/main/kotlin/org/ostelco/prime/slack/Model.kt +++ b/slack/src/main/kotlin/org/ostelco/prime/slack/Model.kt @@ -5,12 +5,13 @@ import com.fasterxml.jackson.annotation.JsonProperty data class Message( val channel: String, @JsonProperty("username") val userName: String? = null, - val text: String ?= null, - @JsonProperty("icon_emoji") val iconEmoji: String ?= null, + val text: String = "", + @JsonProperty("icon_emoji") val iconEmoji: String? = null, val attachments: List = emptyList()) { fun format(): Message = this.copy( channel = "#$channel", + text = " $text", iconEmoji = iconEmoji?.let { ":$it:" }) } diff --git a/slack/src/main/kotlin/org/ostelco/prime/slack/SlackNotificationReporter.kt b/slack/src/main/kotlin/org/ostelco/prime/slack/SlackNotificationReporter.kt index 893d9ad8f..9b6fccd2e 100644 --- a/slack/src/main/kotlin/org/ostelco/prime/slack/SlackNotificationReporter.kt +++ b/slack/src/main/kotlin/org/ostelco/prime/slack/SlackNotificationReporter.kt @@ -21,7 +21,7 @@ object SlackNotificationReporter { channel = Registry.channel, // text = message, // userName = Registry.userName, - iconEmoji = levelToEmoji(level), + // iconEmoji = levelToEmoji(level), attachments = listOf( Attachment( fallback = message, @@ -53,12 +53,16 @@ object SlackNotificationReporter { TRACE -> "#C0C0C0" } - private fun levelToTitle(level: Level): String = when (level) { - ERROR -> "Error" - WARN -> "Warning" - INFO -> "Info" - DEBUG -> "Debug" - TRACE -> 
"Trace" + private fun levelToTitle(level: Level): String { + val emoji = levelToEmoji(level) + val title = when (level) { + ERROR -> "Error" + WARN -> "Warning" + INFO -> "Info" + DEBUG -> "Debug" + TRACE -> "Trace" + } + return ":$emoji: $title" } }