From 8d18027652a7a5506f238b1e4036dd071890f868 Mon Sep 17 00:00:00 2001 From: Jamal El Youssefi Date: Tue, 19 Jun 2018 11:37:12 +0200 Subject: [PATCH 01/68] PLATFORM-LAUNCHER: added email e2e testing --- Makefile | 5 ++- tests/Makefile | 9 +++++ tests/ethereal.js | 23 +++++++++++ tests/lib/helpers.js | 69 ++++++++++++++++++++++++++++----- tests/oisp-tests.js | 90 ++++++++++++++++++++++++++++++++++++-------- tests/package.json | 4 +- 6 files changed, 172 insertions(+), 28 deletions(-) create mode 100644 tests/ethereal.js diff --git a/Makefile b/Makefile index 5144377a..91fcdb3c 100644 --- a/Makefile +++ b/Makefile @@ -93,7 +93,8 @@ start: build .prepare start-test: build .prepare @$(call msg,"Starting IoT connector (test mode) ..."); - @env TEST="1" ./docker.sh up -d + @make -C tests email-account $(shell pwd)/tests/.env + @env TEST="1" /bin/bash -c "source ./tests/.env && ./docker.sh up -d" start-quick: build-quick .prepare @$(call msg,"Starting IoT connector using pulled images..."); @@ -129,7 +130,7 @@ test: cd $(CURRENT_DIR) && \ sudo make distclean && \ make start-test && \ - cd tests && make && make test; \ + source ./tests/.env && cd tests && make && make test; \ done diff --git a/tests/Makefile b/tests/Makefile index 9261883f..4414f5bf 100644 --- a/tests/Makefile +++ b/tests/Makefile @@ -46,6 +46,15 @@ test: @$(call msg,"Starting the e2e testing ..."); @. ../setup-environment.sh && env http_proxy="" https_proxy="" USERNAME=$(USERNAME) PASSWORD=$(PASSWORD) LOG_LEVEL="error" NODE_ENV="local" npm test +ifeq (email-account,$(firstword $(MAKECMDGOALS))) + OUTPUT_FILE := $(wordlist 2,$(words $(MAKECMDGOALS)),$(MAKECMDGOALS)) + $(eval $(OUTPUT_FILE):;@:) +endif + +email-account: + @$(call msg,"Creating ethereal email account"); + @node ./ethereal.js $(OUTPUT_FILE) + clean: @$(call msg,"Cleaning ..."); diff --git a/tests/ethereal.js b/tests/ethereal.js new file mode 100644 index 00000000..33d1eeee --- /dev/null +++ b/tests/ethereal.js @@ -0,0 +1,23 @@ +'use strict'; +const nodemailer = require('nodemailer'); +var fs = require('fs'); + +var outputFile = ".env" +var args = process.argv.slice(2); + +if ( args[0] ) { + outputFile = args[0]; +} + +nodemailer.createTestAccount((err, account) => { + if (!err) { + fs.writeFileSync(outputFile, 'export SMTP_HOST='+account.smtp.host+'\n'); + fs.appendFileSync(outputFile, 'export SMTP_PORT='+account.smtp.port+'\n'); + fs.appendFileSync(outputFile, 'export SMTP_USERNAME='+account.user+'\n'); + fs.appendFileSync(outputFile, 'export SMTP_PASSWORD='+account.pass+'\n'); + fs.appendFileSync(outputFile, 'export IMAP_HOST='+account.imap.host+'\n'); + fs.appendFileSync(outputFile, 'export IMAP_PORT='+account.imap.port+'\n'); + } +}) + + \ No newline at end of file diff --git a/tests/lib/helpers.js b/tests/lib/helpers.js index a2ee0814..8f325244 100644 --- a/tests/lib/helpers.js +++ b/tests/lib/helpers.js @@ -20,8 +20,8 @@ //------------------------------------------------------------------------------------------------------- var uuid = require('uuid/v4'); - var chai = require('chai'); +var Imap = require('imap'); var assert = chai.assert; var config = require("../test-config.json"); @@ -269,13 +269,7 @@ function createRule(ruleConfig, userToken, accountId, deviceId, cb) { status: "Active", resetType: "Automatic", - actions: [{ - type: "actuation", - target: [ - ruleConfig.actuationCmd - ] - }], - + actions: ruleConfig.actions, population: { ids: [deviceId], @@ -466,6 +460,62 @@ function getData(from, userToken, accountId, deviceId, cid, cb) { }); } +function 
getEmailMessage(user, password, host, port, num, cb) { + if (!cb) { + throw "Callback required"; + } + var imap = new Imap({ + user: user, + password: password, + host: host, + port: port, + tls: true, + tlsOptions: { rejectUnauthorized: false } + }); + + imap.once('ready', function() { + imap.openBox('INBOX', true, function(err, box) { + if ( !err ) { + var f = imap.seq.fetch(num, { + bodies: ['HEADER.FIELDS (TO)', '1'], + struct: true + }); + + f.on('message', function(msg, seqno) { + var buffer = ''; + msg.on('body', function(stream, info) { + + stream.on('data', function(chunk) { + buffer += chunk.toString('utf8'); + }); + }); + + msg.once('end', function() { + buffer = buffer.replace("<","<"); + buffer = buffer.replace(">",">"); + cb(null, buffer); + imap.closeBox(() => { + imap.destroy(); + imap.end(); + }); + }); + }); + } + else { + cb(err); + } + }); + }); + + imap.once('error', function(err) { + cb(err); + }); + + imap.connect(); + +} + + module.exports = { createComponentId: createComponentId, getActivationCode: getActivationCode, @@ -480,5 +530,6 @@ module.exports = { sendObservation: sendObservation, getActuationsForComponent: getActuationsForComponent, getObservation: getObservation, - getData: getData + getData: getData, + getEmailMessage: getEmailMessage }; diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index bec5dcc9..4f4ce905 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -41,6 +41,8 @@ var actuatorType = "powerswitch.v1.0" var switchOnCmdName = "switch-on" var switchOffCmdName = "switch-off" +var recipientEmail = "jamal.el.youssefi@intel.com" + var rules = []; rules[switchOnCmdName] = { @@ -48,7 +50,16 @@ rules[switchOnCmdName] = { conditionComponent: componentName, basicConditionOperator: "<=", basicConditionValue: "15", - actuationCmd: switchOnCmdName + actions: [ + { + type: "actuation", + target: [ switchOnCmdName ] + }, + { + type: "mail", + target: [ recipientEmail ] + } + ], }; rules[switchOffCmdName] = { @@ -56,7 +67,16 @@ rules[switchOffCmdName] = { conditionComponent: componentName, basicConditionOperator: ">", basicConditionValue: "25", - actuationCmd: switchOffCmdName + actions: [ + { + type: "actuation", + target: [ switchOffCmdName ] + }, + { + type: "mail", + target: [ recipientEmail ] + } + ], }; //------------------------------------------------------------------------------------------------------- @@ -74,43 +94,53 @@ var firstObservationTime; var temperatureValues = [{ value: -15, - expectedActuation: 1 // swich on + expectedActuation: 1, // swich on + expectedEmailReason: "temperature-sensor <= 15" }, { value: -5, - expectedActuation: 1 // swich on + expectedActuation: 1, // swich on + expectedEmailReason: "temperature-sensor <= 15" }, { value: 5, - expectedActuation: 1 // swich on + expectedActuation: 1, // swich on + expectedEmailReason: "temperature-sensor <= 15" }, { value: 15, - expectedActuation: 1 // swich on + expectedActuation: 1, // swich on + expectedEmailReason: "temperature-sensor <= 15" }, { value: 25, - expectedActuation: null // do nothing (no actuation) + expectedActuation: null, // do nothing (no actuation) + expectedEmailReason: null, }, { value: 30, - expectedActuation: 0 // swich off + expectedActuation: 0, // swich off + expectedEmailReason: "temperature-sensor > 25" }, { value: 20, - expectedActuation: null // do nothing (no actuation) + expectedActuation: null, // do nothing (no actuation) + expectedEmailReason: null }, { value: 14, - expectedActuation: 1 // swich on + expectedActuation: 1, // swich on + 
expectedEmailReason: "temperature-sensor <= 15" }, { value: 20, - expectedActuation: null // do nothing (no actuation) + expectedActuation: null, // do nothing (no actuation) + expectedEmailReason: null }, { value: 28, - expectedActuation: 0 // swich off + expectedActuation: 0, // swich off + expectedEmailReason: "temperature-sensor > 25" } ]; @@ -354,6 +384,7 @@ describe("Sending observations and checking rules ...\n".bold, function() { var index = 0; var nbActuations = 0; + var emailNum = 1; process.stdout.write(" "); @@ -375,10 +406,11 @@ describe("Sending observations and checking rules ...\n".bold, function() { } }; + helpers.wsConnect(proxyConnector, deviceToken, deviceId, function(message) { --nbActuations; - var expectedValue = temperatureValues[index].expectedActuation.toString(); + var expectedActuationValue = temperatureValues[index].expectedActuation.toString(); var componentParam = message.content.params.filter(function(param){ return param.name == componentParamName; }); @@ -387,13 +419,39 @@ describe("Sending observations and checking rules ...\n".bold, function() { var param = componentParam[0]; var paramValue = param.value.toString(); - if(paramValue == expectedValue) + if(paramValue == expectedActuationValue) { - step(); + helpers.getEmailMessage(process.env.SMTP_USERNAME, process.env.SMTP_PASSWORD, + process.env.IMAP_HOST, process.env.IMAP_PORT, emailNum, function(err, message) { + if (!err) { + var lines = message.toString().split("\n"); + var i; + for(i=0; i Date: Tue, 19 Jun 2018 12:38:00 +0200 Subject: [PATCH 02/68] PLATFORM-LAUNCHER: modify the email recipient --- tests/oisp-tests.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index 4f4ce905..14cfbf10 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -41,7 +41,7 @@ var actuatorType = "powerswitch.v1.0" var switchOnCmdName = "switch-on" var switchOffCmdName = "switch-off" -var recipientEmail = "jamal.el.youssefi@intel.com" +var recipientEmail = "test.receiver@streammyiot.com" var rules = []; From 0814a2b5ab03027fce2c86d78020caeace3ddee5 Mon Sep 17 00:00:00 2001 From: Jamal El Youssefi Date: Tue, 19 Jun 2018 13:53:16 +0200 Subject: [PATCH 03/68] PLATFORM-LAUNCHER: update email e2e testing --- setup-environment.example.sh | 14 ++++++++++---- tests/ethereal.js | 2 ++ tests/oisp-tests.js | 8 ++++++-- 3 files changed, 18 insertions(+), 6 deletions(-) diff --git a/setup-environment.example.sh b/setup-environment.example.sh index f4c571e1..55278fd4 100755 --- a/setup-environment.example.sh +++ b/setup-environment.example.sh @@ -30,6 +30,12 @@ export BACKEND='backend:8080' export NGINX='nginx' export REDIS='redis' export REDIS_PORT='6379' + +export SMTP_HOST="${SMTP_HOST:-auth.smtp.1and1.co.uk}" +export SMTP_PORT="${SMTP_PORT:-587}" +export SMTP_USERNAME="${SMTP_USERNAME:-test.sender@streammyiot.com}" +export SMTP_PASSWORD="${SMTP_PASSWORD:-xxxxx}" + export COMPOSE_PROJECT_NAME="oisp" export VCAP_SERVICES='{ @@ -96,11 +102,11 @@ export VCAP_SERVICES='{ "smtp": [ { "credentials": { - "host": "auth.smtp.1and1.co.uk", - "port": "587", + "host": "'$SMTP_HOST'", + "port": "'$SMTP_PORT'", "protocol": "smtp", - "username": "test.sender@streammyiot.com", - "password": "smtp-password-goes-here" + "username": "'$SMTP_USERNAME'", + "password": "'$SMTP_PASSWORD'" }, "label": "smtp", "name": "mysmtp" diff --git a/tests/ethereal.js b/tests/ethereal.js index 33d1eeee..edf39610 100644 --- a/tests/ethereal.js +++ b/tests/ethereal.js @@ -15,6 +15,8 @@ 
nodemailer.createTestAccount((err, account) => { fs.appendFileSync(outputFile, 'export SMTP_PORT='+account.smtp.port+'\n'); fs.appendFileSync(outputFile, 'export SMTP_USERNAME='+account.user+'\n'); fs.appendFileSync(outputFile, 'export SMTP_PASSWORD='+account.pass+'\n'); + fs.appendFileSync(outputFile, 'export IMAP_USERNAME='+account.user+'\n'); + fs.appendFileSync(outputFile, 'export IMAP_PASSWORD='+account.pass+'\n'); fs.appendFileSync(outputFile, 'export IMAP_HOST='+account.imap.host+'\n'); fs.appendFileSync(outputFile, 'export IMAP_PORT='+account.imap.port+'\n'); } diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index 14cfbf10..c26046ef 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -43,6 +43,11 @@ var switchOffCmdName = "switch-off" var recipientEmail = "test.receiver@streammyiot.com" +var imap_username = process.env.IMAP_USERNAME; +var imap_password = process.env.IMAP_PASSWORD; +var imap_host = process.env.IMAP_HOST; +var imap_port = process.env.IMAP_PORT; + var rules = []; rules[switchOnCmdName] = { @@ -421,8 +426,7 @@ describe("Sending observations and checking rules ...\n".bold, function() { if(paramValue == expectedActuationValue) { - helpers.getEmailMessage(process.env.SMTP_USERNAME, process.env.SMTP_PASSWORD, - process.env.IMAP_HOST, process.env.IMAP_PORT, emailNum, function(err, message) { + helpers.getEmailMessage(imap_username, imap_password, imap_host, imap_port, emailNum, function(err, message) { if (!err) { var lines = message.toString().split("\n"); var i; From 6e5f141dec07bee99e569667a963849ecc05c1e4 Mon Sep 17 00:00:00 2001 From: Jamal El Youssefi Date: Tue, 19 Jun 2018 20:32:24 +0200 Subject: [PATCH 04/68] PLATFORM-LAUNCHER: fixed print -e issue --- tests/Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/Makefile b/tests/Makefile index 4414f5bf..900b5d2e 100644 --- a/tests/Makefile +++ b/tests/Makefile @@ -17,6 +17,7 @@ #---------------------------------------------------------------------------------------------------------------------- # Configs #---------------------------------------------------------------------------------------------------------------------- +SHELL:=/bin/bash MAKEFILE_DIR := $(abspath $(lastword $(MAKEFILE_LIST))) CURRENT_DIR := $(patsubst %/,%,$(dir $(MAKEFILE_DIR))) From 2ae9207d107ab1650d84d79122784b2c3a50da6e Mon Sep 17 00:00:00 2001 From: Jamal El Youssefi Date: Wed, 20 Jun 2018 13:07:43 +0200 Subject: [PATCH 05/68] PLATFORM-LAUNCHER: fixed synchronizationStatus issue --- tests/lib/helpers.js | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/lib/helpers.js b/tests/lib/helpers.js index 8f325244..aca4438d 100644 --- a/tests/lib/helpers.js +++ b/tests/lib/helpers.js @@ -268,6 +268,7 @@ function createRule(ruleConfig, userToken, accountId, deviceId, cb) { type: "Regular", status: "Active", resetType: "Automatic", + synchronizationStatus: "NotSync", actions: ruleConfig.actions, From 3032276aa40e27583f921bbd7294d6220c04ded4 Mon Sep 17 00:00:00 2001 From: Jamal El Youssefi Date: Wed, 20 Jun 2018 13:16:37 +0200 Subject: [PATCH 06/68] PLATFORM-LAUNCHER: update email e2e testing --- tests/Makefile | 2 +- tests/oisp-tests.js | 48 +++++++++++++++++++++++++++++++++------------ 2 files changed, 37 insertions(+), 13 deletions(-) diff --git a/tests/Makefile b/tests/Makefile index 900b5d2e..998a9dc9 100644 --- a/tests/Makefile +++ b/tests/Makefile @@ -52,7 +52,7 @@ ifeq (email-account,$(firstword $(MAKECMDGOALS))) $(eval $(OUTPUT_FILE):;@:) endif -email-account: +email-account: all @$(call msg,"Creating ethereal 
email account"); @node ./ethereal.js $(OUTPUT_FILE) diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index c26046ef..9e6c9f7b 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -41,7 +41,7 @@ var actuatorType = "powerswitch.v1.0" var switchOnCmdName = "switch-on" var switchOffCmdName = "switch-off" -var recipientEmail = "test.receiver@streammyiot.com" +var emailRecipient = "test.receiver@streammyiot.com" var imap_username = process.env.IMAP_USERNAME; var imap_password = process.env.IMAP_PASSWORD; @@ -62,7 +62,7 @@ rules[switchOnCmdName] = { }, { type: "mail", - target: [ recipientEmail ] + target: [ emailRecipient ] } ], }; @@ -79,7 +79,7 @@ rules[switchOffCmdName] = { }, { type: "mail", - target: [ recipientEmail ] + target: [ emailRecipient ] } ], }; @@ -428,25 +428,49 @@ describe("Sending observations and checking rules ...\n".bold, function() { { helpers.getEmailMessage(imap_username, imap_password, imap_host, imap_port, emailNum, function(err, message) { if (!err) { + var regExps = [ + { + value: /^To:.*/, + expected: emailRecipient, + found: false + }, + { + value: /^- Device:.*/, + expected: deviceId, + found: false + }, + { + value: /^- Reason:.*/, + expected: temperatureValues[index].expectedEmailReason, + found: false + } + ]; var lines = message.toString().split("\n"); var i; + for(i=0; i Date: Fri, 22 Jun 2018 14:45:09 +0200 Subject: [PATCH 07/68] PLATFORM-LAUNCHER: updated top level Makefiles to improve the e2e testing --- Makefile | 27 ++++++++++++++++++++------- tests/Makefile | 6 +++--- 2 files changed, 23 insertions(+), 10 deletions(-) diff --git a/Makefile b/Makefile index 91fcdb3c..0aa6d389 100644 --- a/Makefile +++ b/Makefile @@ -30,13 +30,19 @@ DOCKER_REGISTRY=localhost:5000/ .init: @$(call msg,"Initializing ..."); + @if [ ! -f ".firstRun" ]; then \ + make docker-clean && \ + touch .firstRun;\ + fi @$(call msg,"Currently on branch ${BRANCH}"); @if [ "${BRANCH}" != master ]; then \ - echo -e "Non-master branch detected! Submodules will not be updated automatically. \nYou have to run 'make update' for submodules from develop branch and update manually otherwise"; \ - read -r -p "Continue? [Y/n]: " response; \ - case $$response in \ - [Nn]* ) echo "Bye!"; exit 1; \ - esac \ + if [ "${TEST}" != "1" ]; then \ + echo -e "Non-master branch detected! Submodules will not be updated automatically. \nYou have to run 'make update' for submodules from develop branch and update manually otherwise"; \ + read -r -p "Continue? 
[Y/n]: " response; \ + case $$response in \ + [Nn]* ) echo "Bye!"; exit 1; \ + esac \ + fi; \ else \ git submodule init; \ git submodule update; \ @@ -91,10 +97,11 @@ start: build .prepare @$(call msg,"Starting IoT connector ..."); @./docker.sh up -d $(CMD_ARGS) +start-test: export TEST := "1" start-test: build .prepare @$(call msg,"Starting IoT connector (test mode) ..."); @make -C tests email-account $(shell pwd)/tests/.env - @env TEST="1" /bin/bash -c "source ./tests/.env && ./docker.sh up -d" + @source ./tests/.env && ./docker.sh up -d start-quick: build-quick .prepare @$(call msg,"Starting IoT connector using pulled images..."); @@ -117,6 +124,11 @@ update: @git submodule foreach git fetch origin @git submodule foreach git checkout origin/develop +docker-clean: + @$(call msg,"Removing docker images and containers ..."); + @make remove + + ifeq (test,$(firstword $(MAKECMDGOALS))) NB_TESTS := $(wordlist 2,$(words $(MAKECMDGOALS)),$(MAKECMDGOALS)) ifeq ($(NB_TESTS),) @@ -125,12 +137,13 @@ endif $(eval $(NB_TESTS):;@:) endif +test: export TEST := "1" test: @for ((i=0; i < ${NB_TESTS}; i++)) do \ cd $(CURRENT_DIR) && \ sudo make distclean && \ make start-test && \ - source ./tests/.env && cd tests && make && make test; \ + source ./tests/.env && cd tests && make test; \ done diff --git a/tests/Makefile b/tests/Makefile index 998a9dc9..9bc54fe2 100644 --- a/tests/Makefile +++ b/tests/Makefile @@ -30,11 +30,11 @@ ROLE = "admin" # targets #---------------------------------------------------------------------------------------------------------------------- -all: +npm-install: @$(call msg,"Installing npm packages ..."); @cd $(CURRENT_DIR) && npm install > /dev/null -test: +test: npm-install @$(call msg,"Waiting for frontend"); @. ../setup-environment.sh && until kafkacat -b localhost:9092 -t heartbeat -e 2> /dev/null | grep -m 1 "dashboard"; do sleep 1 ; done @@ -52,7 +52,7 @@ ifeq (email-account,$(firstword $(MAKECMDGOALS))) $(eval $(OUTPUT_FILE):;@:) endif -email-account: all +email-account: npm-install @$(call msg,"Creating ethereal email account"); @node ./ethereal.js $(OUTPUT_FILE) From 76efe637b1ae8e89450205bd979717f54d4c4bd1 Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Wed, 20 Jun 2018 13:11:22 +0200 Subject: [PATCH 08/68] Docker-compose wait for kafka in gearpump. 
--- docker-compose-quick.yml | 29 +++++++++++++---------------- docker-compose.yml | 2 +- 2 files changed, 14 insertions(+), 17 deletions(-) diff --git a/docker-compose-quick.yml b/docker-compose-quick.yml index 87d4b9a3..5a860bd9 100644 --- a/docker-compose-quick.yml +++ b/docker-compose-quick.yml @@ -49,31 +49,28 @@ services: - ADVERTISED_PORT=9092 - AUTO_CREATE_TOPICS=true gearpump: - image: "${DOCKER_REGISTRY}${COMPOSE_PROJECT_NAME}/gearpump" - ports: - - 8090:8090 + image: "${DOCKER_REGISTRY}{COMPOSE_PROJECT_NAME}/gearpump" depends_on: - nginx - hbase - redis: - image: redis:3.0 - volumes: - - ./data/redis:/data - ports: - - 6379:6379 - rules-engine-build: - image: "${DOCKER_REGISTRY}${COMPOSE_PROJECT_NAME}/rules-engine-build" - depends_on: - - gearpump - - nginx - - hbase - kafka - frontend working_dir: /app environment: - VCAP_SERVICES=${VCAP_SERVICES} - VCAP_APPLICATION=${VCAP_APPLICATION} - command: ./wait-for-it.sh gearpump:8090 -t 300 -- ./wait-for-it.sh nginx:80 -t 300 -- ./wait-for-it.sh hbase:2181 -t 300 -- ./wait-for-it.sh kafka:9092 -t 300 -- ./wait-for-it.sh frontend:4001 -t 300 -- ./local-push-to-gearpump.sh + - KAFKA=${KAFKA} + - KAFKA_HEARTBEAT_TOPIC=${KAFKA_HEARTBEAT_TOPIC} + - GEARPUMP=${GEARPUMP} + ports: + - 8090:8090 + command: bash bootstrap.sh + redis: + image: redis:3.0 + volumes: + - ./data/redis:/data + ports: + - 6379:6379 nginx: image: "${DOCKER_REGISTRY}${COMPOSE_PROJECT_NAME}/nginx" ports: diff --git a/docker-compose.yml b/docker-compose.yml index c637526f..f93cc48a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -75,7 +75,7 @@ services: - GEARPUMP=${GEARPUMP} ports: - 8090:8090 - command: bash bootstrap.sh + command: bash wait-for-it.sh kafka:9092 -t 300 -- bash bootstrap.sh nginx: build: context: ./oisp-frontend/public-interface/nginx From 86c25acf5e79c19dab838e74a8762f2eb217d49d Mon Sep 17 00:00:00 2001 From: chuan9 Date: Mon, 25 Jun 2018 21:31:06 -0400 Subject: [PATCH 09/68] Implement E2E test for admin role --- tests/lib/helpers.js | 536 ------------------ tests/lib/helpers/accounts.js | 209 +++++++ tests/lib/helpers/alerts.js | 158 ++++++ tests/lib/helpers/auth.js | 94 ++++ tests/lib/helpers/cmpcatalog.js | 134 +++++ tests/lib/helpers/connector.js | 54 ++ tests/lib/helpers/control.js | 120 ++++ tests/lib/helpers/data.js | 168 ++++++ tests/lib/helpers/devices.js | 392 +++++++++++++ tests/lib/helpers/index.js | 37 ++ tests/lib/helpers/invitation.js | 139 +++++ tests/lib/helpers/mail.js | 81 +++ tests/lib/helpers/rules.js | 346 ++++++++++++ tests/lib/helpers/users.js | 243 ++++++++ tests/oisp-tests.js | 943 +++++++++++++++++++++++++++++--- tests/package.json | 5 +- 16 files changed, 3044 insertions(+), 615 deletions(-) delete mode 100644 tests/lib/helpers.js create mode 100644 tests/lib/helpers/accounts.js create mode 100644 tests/lib/helpers/alerts.js create mode 100644 tests/lib/helpers/auth.js create mode 100644 tests/lib/helpers/cmpcatalog.js create mode 100644 tests/lib/helpers/connector.js create mode 100644 tests/lib/helpers/control.js create mode 100644 tests/lib/helpers/data.js create mode 100644 tests/lib/helpers/devices.js create mode 100644 tests/lib/helpers/index.js create mode 100644 tests/lib/helpers/invitation.js create mode 100644 tests/lib/helpers/mail.js create mode 100644 tests/lib/helpers/rules.js create mode 100644 tests/lib/helpers/users.js diff --git a/tests/lib/helpers.js b/tests/lib/helpers.js deleted file mode 100644 index aca4438d..00000000 --- a/tests/lib/helpers.js +++ /dev/null @@ -1,536 +0,0 @@ -/** - * 
Copyright (c) 2017 Intel Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -"use strict"; - -//------------------------------------------------------------------------------------------------------- -// Helper Functions -//------------------------------------------------------------------------------------------------------- - -var uuid = require('uuid/v4'); -var chai = require('chai'); -var Imap = require('imap'); -var assert = chai.assert; - -var config = require("../test-config.json"); -var oispSdk = require("@open-iot-service-platform/oisp-sdk-js"); -var api = oispSdk(config).api.rest; - -function createComponentId() { - return uuid(); -} - -function getActivationCode(accountId, userToken, cb) { - if (!cb) { - throw "Callback required"; - } - - var data = { - userToken: userToken, - accountId: accountId - }; - - api.accounts.refreshAccountActivationCode(data, function(err, response) { - cb(err, response.activationCode) - }) -} - -function login(username, password, cb) { - if (!cb) { - throw "Callback required"; - } - - var data = { - body: { - username: username, - password: password - } - }; - - api.auth.getAuthToken(data, function(err, response) { - var userToken = null; - - if (!err) { - assert.isString(response.token, "no access token retrieved"); - if (response.token) { - userToken = response.token; - } - } - cb(err, userToken); - }) -} - -function wsConnect(connector, deviceToken, deviceId, cb) { - if (!cb) { - throw "Callback required"; - } - - var deviceInfo = { - device_id: deviceId, - device_token: deviceToken - }; - - connector.updateDeviceInfo(deviceInfo) - - var data = { - deviceId: deviceId - }; - - connector.controlCommandListen(data, cb, function() {}); -} - - -function createAccount(name, userToken, cb) { - if (!cb) { - throw "Callback required"; - } - - var data = { - userToken: userToken, - body: { - name: name - } - }; - - api.accounts.createAccount(data, function(err, response) { - if (!err) { - assert.equal(response.name, name, "accounts name is wrong"); - assert.isString(response.id, "Account id not returned"); - - cb(null, response.id); - } - else { - cb(err); - } - }); -} - -function createDevice(name, deviceId, userToken, accountId, cb) { - if (!cb) { - throw "Callback required"; - } - - var data = { - userToken: userToken, - accountId: accountId, - body: { - name: name, - deviceId: deviceId, - gatewayId: "00-11-22-33-44-55" - } - }; - - api.devices.createDevice(data, function(err, response) { - if (err) { - cb(err, null); - } - else { - cb(null, deviceId); - } - }); -} - -function activateDevice(userToken, accountId, deviceId, cb) { - if (!cb) { - throw "Callback required"; - } - - getActivationCode(accountId, userToken, function(err, activationCode) { - if (err) { - cb(err); - } else { - var data = { - userToken: userToken, - accountId: accountId, - deviceId: deviceId, - body: { - activationCode: activationCode - } - }; - - api.devices.activateDevice(data, function(err, response) { - if (err) { - cb(err); - } else { - cb(null, 
response.deviceToken); - } - }) - } - }) -} - -function createComponent(name, type, userToken, accountId, deviceId, force, cb) { - if (!cb) { - throw "Callback required"; - } - - var cid = createComponentId(); - - var data = { - userToken: userToken, - accountId: accountId, - deviceId: deviceId, - body: { - name: name, - type: type, - cid: cid - } - }; - - api.devices.addDeviceComponent(data, function(err, response) { - if (err) { - cb(null); - } else { - cb(null, response.cid); - } - }); -} - - -function createCommand(name, paramName, value, userToken, accountId, deviceId, actId, cb) { - if (!cb) { - throw "Callback required"; - } - - var data = { - userToken: userToken, - accountId: accountId, - commandName: name, - body: { - commands: [{ - componentId: actId, - transport: "ws", - parameters: [{ - name: paramName, - value: value.toString() - }] - }] - }, - deviceId: deviceId - }; - - api.control.saveComplexCommand(data, function(err, response) { - cb(err); - }); -} - -function isRuleSynchronized(userToken, accountId, ruleId, cb) { - if (!cb) { - throw "Callback required"; - } - var data = { - userToken: userToken, - accountId: accountId, - ruleId: ruleId - }; - - api.rules.getRuleDetails(data, function(err, response) { - if (err) { - cb(err) - } else { - if ( response.id == ruleId ) { - { - cb(null, response.synchronizationStatus == "Sync" ? true : false) - } - } - else { - cb("rule "+ruleId + " not found "); - } - } - }); -} - -function createRule(ruleConfig, userToken, accountId, deviceId, cb) { - if (!cb) { - throw "Callback required"; - } - - var data = { - userToken: userToken, - accountId: accountId, - - body: { - name: ruleConfig.name, - description: "OISP testing rule", - priority: "Medium", - type: "Regular", - status: "Active", - resetType: "Automatic", - synchronizationStatus: "NotSync", - - actions: ruleConfig.actions, - - population: { - ids: [deviceId], - attributes: null, - tags: null - }, - - conditions: { - operator: "OR", - values: [{ - component: { - dataType: "Number", - name: ruleConfig.conditionComponent, - cid: ruleConfig.cid - }, - type: "basic", - values: [ruleConfig.basicConditionValue.toString()], - operator: ruleConfig.basicConditionOperator - }] - } - } - }; - - - api.rules.createRule(data, function(err, response) { - if (err) { - cb(err) - } else { - var ruleId = response.id; - var syncInterval = setInterval( function(id) { - isRuleSynchronized(userToken, accountId, ruleId, function(err, status) { - if (err) { - clearInterval(syncInterval); - cb(err) - } - else { - if ( status == true ) { - clearInterval(syncInterval); - cb(null, ruleId) - } - } - }) - }, 500) - } - }); - - - -} - -function sendObservation(value, deviceToken, accountId, deviceId, cid, cb) { - if (!cb) { - throw "Callback required"; - } - - var ts = new Date().getTime(); - - var data = { - deviceToken: deviceToken, - deviceId: deviceId, - body: { - accountId: accountId, - on: ts, - data: [{ - componentId: cid, - value: value.toString(), - on: ts - }] - } - } - - api.devices.submitData(data, function(err, response) { - if (err) { - cb(err) - } else { - cb(null, ts); - } - }) - -} - -function getActuationsForComponent(from, maxItems, deviceToken, accountId, deviceId, cid, cb) { - if (!cb) { - throw "Callback required"; - } - - var data = { - from: from, - deviceToken: deviceToken, - accountId: accountId, - deviceId: deviceId - }; - - api.control.pullActuations(data, function(err, response) { - if (err) { - cb(err) - } else { - var actuations = []; - - for (var i = 0; i < response.length; i++) { 
- if (response[i].componentId == cid) { - if (actuations.length < maxItems) { - actuations.push(response[i].parameters[0].value); - } else { - break; - } - } - } - - cb(null, actuations) - } - }) - -} - - -function getObservation(ts, userToken, accountId, deviceId, cid, cb) { - if (!cb) { - throw "Callback required"; - } - - var data = { - userToken: userToken, - accountId: accountId, - body: { - from: ts, - targetFilter: { - deviceList: [deviceId] - }, - metrics: [{ - id: cid - }] - } - } - - api.data.searchData(data, function(err, response) { - var found = false; - - if (err) { - cb(err); - } else { - if (response.series) { - for (var i = 0; i < response.series.length; i++) { - for (var j = 0; j < response.series[i].points.length; j++) { - if (response.series[i].points[j].ts == ts) { - found = true; - cb(null, ts, parseInt(response.series[i].points[j].value)); - break; - } - } - } - } - - if (!found) { - cb(null, ts, null); - } - } - }); -} - -function getData(from, userToken, accountId, deviceId, cid, cb) { - if (!cb) { - throw "Callback required"; - } - - var data = { - userToken: userToken, - accountId: accountId, - body: { - from: from, - targetFilter: { - deviceList: [deviceId] - }, - metrics: [{ - id: cid - }] - } - }; - - api.data.searchData(data, function(err, response) { - if (err) { - cb(err) - } else { - if (response.series) { - cb(null, response.series[0].points) - } - } - }); -} - -function getEmailMessage(user, password, host, port, num, cb) { - if (!cb) { - throw "Callback required"; - } - var imap = new Imap({ - user: user, - password: password, - host: host, - port: port, - tls: true, - tlsOptions: { rejectUnauthorized: false } - }); - - imap.once('ready', function() { - imap.openBox('INBOX', true, function(err, box) { - if ( !err ) { - var f = imap.seq.fetch(num, { - bodies: ['HEADER.FIELDS (TO)', '1'], - struct: true - }); - - f.on('message', function(msg, seqno) { - var buffer = ''; - msg.on('body', function(stream, info) { - - stream.on('data', function(chunk) { - buffer += chunk.toString('utf8'); - }); - }); - - msg.once('end', function() { - buffer = buffer.replace("<","<"); - buffer = buffer.replace(">",">"); - cb(null, buffer); - imap.closeBox(() => { - imap.destroy(); - imap.end(); - }); - }); - }); - } - else { - cb(err); - } - }); - }); - - imap.once('error', function(err) { - cb(err); - }); - - imap.connect(); - -} - - -module.exports = { - createComponentId: createComponentId, - getActivationCode: getActivationCode, - login: login, - wsConnect: wsConnect, - createAccount: createAccount, - createDevice: createDevice, - activateDevice: activateDevice, - createComponent: createComponent, - createCommand: createCommand, - createRule: createRule, - sendObservation: sendObservation, - getActuationsForComponent: getActuationsForComponent, - getObservation: getObservation, - getData: getData, - getEmailMessage: getEmailMessage -}; diff --git a/tests/lib/helpers/accounts.js b/tests/lib/helpers/accounts.js new file mode 100644 index 00000000..6bd6244b --- /dev/null +++ b/tests/lib/helpers/accounts.js @@ -0,0 +1,209 @@ +/** + * Copyright (c) 2017 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +"use strict"; + +var uuid = require('uuid/v4'); + +var chai = require('chai'); +var assert = chai.assert; + +var config = require("../../test-config.json"); +var oispSdk = require("@open-iot-service-platform/oisp-sdk-js"); +var api = oispSdk(config).api.rest; + + + +function createAccount(name, userToken, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + body: { + name: name + } + }; + + api.accounts.createAccount(data, function(err, response) { + if (!err) { + assert.isString(response.id, "Account id not returned"); + cb(null, response); + } + else { + cb(err); + } + }); +} + +function getAccountInfo(accountId, userToken, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId + }; + + api.accounts.getAccountInfo(data, function(err, response) { + if (!err) { + assert.notEqual(response, null, 'response is null') + cb(null, response); + } + else { + cb(err); + } + }); +} + +function updateAccount(accountId, userToken, newAccountInfo, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + body: newAccountInfo + }; + + api.accounts.updateAccount(data, function(err, response) { + if (!err) { + assert.notEqual(response, null, 'response is null') + cb(null, response); + } + else { + cb(err); + } + }); +} + +function getAccountActivationCode(accountId, userToken, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId + }; + + api.accounts.getAccountActivationCode(data, function(err, response) { + if(!err){ + assert.notEqual(response.activationCode, null ,'activation code is null') + cb(null,response) + } + else{ + cb(err); + } + }) +} + +function refreshAccountActivationCode(accountId, userToken, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId + }; + + api.accounts.refreshAccountActivationCode(data, function(err, response) { + if(!err){ + assert.notEqual(response.activationCode, null ,'activation code is null') + cb(null,response) + } + else{ + cb(err); + } + }) +} + +function getAccountUsers(accountId, userToken, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId + }; + + api.accounts.getAccountUsers(data, function(err, response) { + assert.notEqual(response, null ,'user list is null') + cb(err, response) + }) +} + +function changeAccountUser(accountId, userToken, userId, cb) { + if (!cb) { + throw "Callback required"; + } + + + var data = { + userToken: userToken, + accountId: accountId, + userId: userId, + body:{ + id: userId, + accounts:{} + } + }; + + data.body.accounts[accountId] = 'admin' + + api.accounts.changeAccountUser(data, function(err, response) { + assert.notEqual(response, null, 'response is null') + cb(err, response) + }) +} + +function deleteAccount(userToken, accountId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId + }; + + 
api.accounts.deleteAccount(data, function(err, response) { + if (!err) { + assert.notEqual(response, null, 'response is null') + cb(null,response); + } + else { + cb(err); + } + }); +} + +module.exports = { + getAccountActivationCode: getAccountActivationCode, + refreshAccountActivationCode: refreshAccountActivationCode, + getAccountUsers: getAccountUsers, + changeAccountUser: changeAccountUser, + createAccount: createAccount, + getAccountInfo: getAccountInfo, + updateAccount: updateAccount, + deleteAccount: deleteAccount +}; diff --git a/tests/lib/helpers/alerts.js b/tests/lib/helpers/alerts.js new file mode 100644 index 00000000..0bc1a3c4 --- /dev/null +++ b/tests/lib/helpers/alerts.js @@ -0,0 +1,158 @@ +/** + * Copyright (c) 2017 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +"use strict"; + +var uuid = require('uuid/v4'); + +var chai = require('chai'); +var assert = chai.assert; + +var config = require("../../test-config.json"); +var oispSdk = require("@open-iot-service-platform/oisp-sdk-js"); +var api = oispSdk(config).api.rest; + + + +function getListOfAlerts(userToken, accountId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + }; + + api.alerts.getListOfAlerts(data, function(err, response) { + if (err) { + cb(err, null); + } + else { + assert.notEqual(response, null, 'response is null') + cb(null, response); + } + }); +} + +function getAlertDetails(userToken, accountId, alertId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + alertId: alertId + }; + + api.alerts.getAlertDetails(data, function(err, response) { + if (err) { + cb(err, null); + } + else { + assert.notEqual(response, null, 'response is null') + cb(null, response); + } + }); +} + + +function closeAlert(userToken, accountId, alertId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + alertId: alertId + }; + + api.alerts.closeAlert(data, function(err, response) { + if (err) { + cb(err, null); + } + else { + getListOfAlerts(userToken, accountId, function(err,response){ + assert.notEqual(response, null, 'response is null') + cb(null, response); + }) + } + }); +} + +function updateAlertStatus(userToken, accountId, alertId, status, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + alertId: alertId, + statusName: status + }; + + api.alerts.updateAlertStatus(data, function(err, response) { + if (err) { + cb(err, null); + } + else { + getAlertDetails(userToken, accountId, alertId, function(err,response){ + if (err) { + cb(err, null); + } + else { + cb(null, response); + } + }) + } + }); +} + +function addCommentsToAlert(userToken, accountId, alertId, comments, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + alertId: alertId, + body: [] + }; + + 
data.body.push(comments) + + api.alerts.addCommentsToAlert(data, function(err, response) { + if (err) { + cb(err, null); + } + else { + cb(null, response); + } + }); +} + +module.exports = { + getListOfAlerts: getListOfAlerts, + getAlertDetails: getAlertDetails, + closeAlert: closeAlert, + updateAlertStatus: updateAlertStatus, + addCommentsToAlert: addCommentsToAlert +}; diff --git a/tests/lib/helpers/auth.js b/tests/lib/helpers/auth.js new file mode 100644 index 00000000..03c39852 --- /dev/null +++ b/tests/lib/helpers/auth.js @@ -0,0 +1,94 @@ +/** + * Copyright (c) 2017 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +"use strict"; + +var uuid = require('uuid/v4'); + +var chai = require('chai'); +var assert = chai.assert; + +var config = require("../../test-config.json"); +var oispSdk = require("@open-iot-service-platform/oisp-sdk-js"); +var api = oispSdk(config).api.rest; + + +function login(username, password, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + body: { + username: username, + password: password + } + }; + + api.auth.getAuthToken(data, function(err, response) { + var userToken = null; + + if (!err) { + assert.isString(response.token, "no access token retrieved"); + if (response.token) { + userToken = response.token; + } + } + cb(err, userToken); + }) +} + + +function tokenInfo(token, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken:token + }; + + api.auth.getAuthTokenInfo(data, function(err, response) { + if (!err) { + assert.equal(response.header.typ,'JWT', "response header type error"); + } + cb(err, response); + }) +} + +function userInfo(token, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken:token + }; + + api.auth.getAuthUserInfo(data, function(err, response) { + if (!err) { + assert.isString(response.id, "no access token retrieved"); + } + cb(err, response); + }) +} + +module.exports = { + login: login, + userInfo: userInfo, + tokenInfo: tokenInfo +}; diff --git a/tests/lib/helpers/cmpcatalog.js b/tests/lib/helpers/cmpcatalog.js new file mode 100644 index 00000000..080b9656 --- /dev/null +++ b/tests/lib/helpers/cmpcatalog.js @@ -0,0 +1,134 @@ +/** + * Copyright (c) 2017 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +"use strict"; + +var chai = require('chai'); +var assert = chai.assert; + +var config = require("../../test-config.json"); +var oispSdk = require("@open-iot-service-platform/oisp-sdk-js"); +var api = oispSdk(config).api.rest; + + +function getCatalog(userToken, accountId, cb){ + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + body: { + full: true + } + }; + + api.cmpcatalog.getCatalog(data, function(err, response) { + assert.notEqual(response, null, 'response is null') + if (err) { + cb(err) + } else { + cb(null, response) + } + }); +} + + +function createCatalog(userToken, accountId, cb){ + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + body: + { + "dimension": "speed", + "version": "1.0", + "type": "sensor", + "dataType": "Number", + "format": "float", + "min": 0, + "max": 500, + "measureunit": "Degress Celsius", + "display": "timeSeries" + } + }; + + api.cmpcatalog.createCatalog(data, function(err, response) { + assert.notEqual(response, null, 'response is null') + if (err) { + cb(err) + } else { + cb(null, response) + } + }); +} + +function getCatalogDetail(userToken, accountId, cid, cb){ + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + componentId: cid + }; + + api.cmpcatalog.getCatalogDetail(data, function(err, response) { + assert.notEqual(response, null, 'response is null') + if (err) { + cb(err) + } else { + cb(null, response) + } + }); +} + + +function updateCatalog(userToken, accountId, cid, newmin, newmax, cb){ + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + componentId: cid, + body:{ + "min": newmin, + "max": newmax + } + }; + + api.cmpcatalog.updateCatalog(data, function(err, response) { + assert.notEqual(response, null, 'response is null') + if (err) { + cb(err) + } else { + cb(null, response) + } + }); +} + +module.exports={ + getCatalog: getCatalog, + createCatalog: createCatalog, + getCatalogDetail: getCatalogDetail, + updateCatalog: updateCatalog +} \ No newline at end of file diff --git a/tests/lib/helpers/connector.js b/tests/lib/helpers/connector.js new file mode 100644 index 00000000..1f256725 --- /dev/null +++ b/tests/lib/helpers/connector.js @@ -0,0 +1,54 @@ +/** + * Copyright (c) 2017 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +"use strict"; + +//------------------------------------------------------------------------------------------------------- +// Helper Functions +//------------------------------------------------------------------------------------------------------- + +var chai = require('chai'); +var assert = chai.assert; + +var config = require("../../test-config.json"); +var oispSdk = require("@open-iot-service-platform/oisp-sdk-js"); +var api = oispSdk(config).api.rest; + + + +function wsConnect(connector, deviceToken, deviceId, cb) { + if (!cb) { + throw "Callback required"; + } + + var deviceInfo = { + device_id: deviceId, + device_token: deviceToken + }; + + connector.updateDeviceInfo(deviceInfo) + + var data = { + deviceId: deviceId + }; + + connector.controlCommandListen(data, cb, function() {}); +} + +module.exports={ + wsConnect: wsConnect +} + + diff --git a/tests/lib/helpers/control.js b/tests/lib/helpers/control.js new file mode 100644 index 00000000..2909b75a --- /dev/null +++ b/tests/lib/helpers/control.js @@ -0,0 +1,120 @@ +/** + * Copyright (c) 2017 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +"use strict"; + +//------------------------------------------------------------------------------------------------------- +// Helper Functions +//------------------------------------------------------------------------------------------------------- + +var chai = require('chai'); +var assert = chai.assert; + +var config = require("../../test-config.json"); +var oispSdk = require("@open-iot-service-platform/oisp-sdk-js"); +var api = oispSdk(config).api.rest; + + +function sendActuationCommand(paramName, value, userToken, accountId, actuatorId, deviceId, cb){ + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + body: { + commands: [{ + componentId: actuatorId, + transport: "ws", + parameters: [{ + name: paramName, + value: value.toString() + }] + }], + complexCommands: [] + }, + }; + + api.control.sendActuationCommand(data, function(err, response) { + assert.notEqual(response, null, 'response is null') + if (err) { + cb(err) + } else { + cb(null, response) + } + }); +} + +function pullActuations(parameters, deviceToken, accountId, deviceId, cid, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + from: parameters.from, + to: parameters.to, + deviceToken: deviceToken, + accountId: accountId, + deviceId: deviceId + }; + + api.control.pullActuations(data, function(err, response) { + if (err) { + cb(err) + } else { + cb(null, response) + } + }) + +} + +function saveComplexCommand(name, paramName, value, userToken, accountId, deviceId, actId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + commandName: name, + body: { + commands: [{ + componentId: actId, + transport: "ws", + parameters: [{ + name: paramName, + value: value.toString() + }] + }] + }, + deviceId: deviceId + }; + + api.control.saveComplexCommand(data, 
function(err, response) { + assert.notEqual(response, null, 'response is null') + if (err) { + cb(err) + } else { + cb(null, response) + } + }); +} + +module.exports={ + saveComplexCommand: saveComplexCommand, + pullActuations: pullActuations, + sendActuationCommand: sendActuationCommand +} \ No newline at end of file diff --git a/tests/lib/helpers/data.js b/tests/lib/helpers/data.js new file mode 100644 index 00000000..2cc40ed6 --- /dev/null +++ b/tests/lib/helpers/data.js @@ -0,0 +1,168 @@ +/** + * Copyright (c) 2017 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +"use strict"; + +//------------------------------------------------------------------------------------------------------- +// Helper Functions +//------------------------------------------------------------------------------------------------------- + +var chai = require('chai'); +var assert = chai.assert; + +var config = require("../../test-config.json"); +var oispSdk = require("@open-iot-service-platform/oisp-sdk-js"); +var api = oispSdk(config).api.rest; + + + + +function getObservation(ts, userToken, accountId, deviceId, cid, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + body: { + from: ts, + targetFilter: { + deviceList: [deviceId] + }, + metrics: [{ + id: cid + }] + } + } + + api.data.searchData(data, function(err, response) { + var found = false; + + if (err) { + cb(err); + } else { + if (response.series) { + for (var i = 0; i < response.series.length; i++) { + for (var j = 0; j < response.series[i].points.length; j++) { + if (response.series[i].points[j].ts == ts) { + found = true; + cb(null, ts, parseInt(response.series[i].points[j].value)); + break; + } + } + } + } + + if (!found) { + cb(null, ts, null); + } + } + }); +} + +function searchData(from, userToken, accountId, deviceId, cid, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + body: { + from: from, + targetFilter: { + deviceList: [deviceId] + }, + metrics: [{ + id: cid + }] + } + }; + + api.data.searchData(data, function(err, response) { + if (err) { + cb(err) + } else { + if (response.series) { + cb(null, response.series[0].points) + } + } + }); +} + + +function submitData(value, deviceToken, accountId, deviceId, cid, cb) { + if (!cb) { + throw "Callback required"; + } + var ts = new Date().getTime(); + + var data = { + deviceToken: deviceToken, + deviceId: deviceId, + body: { + accountId: accountId, + on: ts, + data: [{ + componentId: cid, + value: value.toString(), + on: ts + }] + } + } + + api.data.submitData(data, function(err, response) { + if (err) { + cb(err) + } else { + if (response.series) { + cb(null, response.series[0].points) + } + } + }); +} + +function searchDataAdvanced(from, userToken, accountId, deviceId, cid, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + body: { + deviceIds: [deviceId], + from: 
from, + componentIds: [cid] + } + }; + + api.data.searchDataAdvanced(data, function(err, response) { + if (err) { + cb(err) + } else { + assert.notEqual(response, null, 'response is null') + cb(null, response) + } + }); +} + +module.exports={ + getObservation: getObservation, + searchData: searchData, + submitData: submitData, + searchDataAdvanced: searchDataAdvanced +} \ No newline at end of file diff --git a/tests/lib/helpers/devices.js b/tests/lib/helpers/devices.js new file mode 100644 index 00000000..9bb0e581 --- /dev/null +++ b/tests/lib/helpers/devices.js @@ -0,0 +1,392 @@ +/** + * Copyright (c) 2017 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +"use strict"; + +var uuid = require('uuid/v4'); + +var chai = require('chai'); +var assert = chai.assert; + +var config = require("../../test-config.json"); +var oispSdk = require("@open-iot-service-platform/oisp-sdk-js"); +var api = oispSdk(config).api.rest; + + +function createDevice(name, deviceId, userToken, accountId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + body: { + name: name, + deviceId: deviceId, + gatewayId: "00-11-22-33-44-55", + tags: ["tag-a", "tag-b"], + attributes: { + os: "linux" + } + } + }; + + api.devices.createDevice(data, function(err, response) { + if (err) { + cb(err, null); + } + else { + assert.notEqual(response, null, 'response is null') + cb(null, response); + } + }); +} + +function getDevices(userToken, accountId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + }; + + api.devices.getDevices(data, function(err, response) { + if (err) { + cb(err, null); + } + else { + assert.notEqual(response, null, 'response is null') + cb(null, response); + } + }); +} + +function getDeviceDetails(userToken, accountId, deviceId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + deviceId: deviceId + }; + + api.devices.getDeviceDetails(data, function(err, response) { + if (err) { + cb(err, null); + } + else { + assert.notEqual(response, null, 'response is null') + cb(null, response); + } + }); +} + +function updateDeviceDetails(userToken, accountId, deviceId, deviceInfo, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + deviceId: deviceId, + body:deviceInfo + }; + + api.devices.updateDeviceDetails(data, function(err, response) { + if (err) { + cb(err, null); + } + else { + assert.notEqual(response, null, 'response is null') + cb(null, response); + } + }); +} + +function getAccountActivationCode(accountId, userToken, cb) { + var data = { + userToken: userToken, + accountId: accountId + }; + + api.accounts.getAccountActivationCode(data, function(err, response) { + if(!err){ + assert.notEqual(response.activationCode, null ,'activation code is null') + cb(null,response.activationCode) + } + else{ + cb(err); + } + }) 
+} + +function activateDevice(userToken, accountId, deviceId, cb) { + if (!cb) { + throw "Callback required"; + } + + getAccountActivationCode(accountId, userToken, function(err, activationCode) { + if (err) { + cb(err); + } else { + var data = { + userToken: userToken, + accountId: accountId, + deviceId: deviceId, + body: { + activationCode: activationCode + } + }; + + api.devices.activateDevice(data, function(err, response) { + if (err) { + cb(err); + } else { + assert.notEqual(response, null, 'response is null') + cb(null, response); + } + }) + } + }) +} + +function createComponentId() { + return uuid(); +} + +function addDeviceComponent(name, type, userToken, accountId, deviceId, cb) { + if (!cb) { + throw "Callback required"; + } + + var cid = createComponentId(); + + var data = { + userToken: userToken, + accountId: accountId, + deviceId: deviceId, + body: { + name: name, + type: type, + cid: cid + } + }; + + api.devices.addDeviceComponent(data, function(err, response) { + if (err) { + cb(null); + } else { + assert.notEqual(response, null, 'response is null') + cb(null, response.cid); + } + }); +} + +function submitData(value, deviceToken, accountId, deviceId, cid, cb) { + if (!cb) { + throw "Callback required"; + } + + var ts = new Date().getTime(); + + var data = { + deviceToken: deviceToken, + deviceId: deviceId, + body: { + accountId: accountId, + on: ts, + data: [{ + componentId: cid, + value: value.toString(), + on: ts + }] + } + } + + api.devices.submitData(data, function(err, response) { + if (err) { + cb(err) + } else { + cb(null, ts); + } + }) + +} + + + +function deleteDeviceComponent (userToken, accountId,deviceId, cid , cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + deviceId: deviceId, + cid: cid + }; + + api.devices.deleteDeviceComponent(data, function(err, response) { + if (err) { + cb(err) + } else { + cb(null, response) + } + }); + +} + +function deleteDevice (userToken, accountId, deviceId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + deviceId: deviceId + }; + + api.devices.deleteDevice(data, function(err, response) { + if (err) { + cb(err) + } else { + cb(null, response) + } + }); + +} + +function getDeviceTags (userToken, accountId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId + }; + + api.devices.getDeviceTags(data, function(err, response) { + if (err) { + cb(err) + } else { + cb(null, response) + } + }); + +} + +function getDeviceAttributes (userToken, accountId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId + }; + + api.devices.getDeviceAttributes(data, function(err, response) { + if (err) { + cb(err) + } else { + cb(null, response) + } + }); + +} + +function countDevices (userToken, accountId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + body:{ + "status":{ + "operator":"OR" + }, + "components":{ + "operator":"OR" + } + } + }; + + api.devices.countDevices(data, function(err, response) { + if (err) { + cb(err) + } else { + cb(null, response) + } + }); + +} + +function searchDevices (userToken, accountId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + limit: 10, + skip: 0, + body:{ + "status":{ + "operator":"OR" + }, + "components":{ + 
"operator":"OR" + } + } + }; + + api.devices.searchDevices(data, function(err, response) { + if (err) { + cb(err) + } else { + cb(null, response) + } + }); +} + +module.exports = { + createDevice: createDevice, + getDevices: getDevices, + getDeviceDetails: getDeviceDetails, + updateDeviceDetails: updateDeviceDetails, + activateDevice: activateDevice, + addDeviceComponent: addDeviceComponent, + submitData: submitData, + deleteDevice: deleteDevice, + deleteDeviceComponent: deleteDeviceComponent, + getDeviceTags: getDeviceTags, + getDeviceAttributes: getDeviceAttributes, + countDevices: countDevices, + searchDevices: searchDevices +}; diff --git a/tests/lib/helpers/index.js b/tests/lib/helpers/index.js new file mode 100644 index 00000000..070df24d --- /dev/null +++ b/tests/lib/helpers/index.js @@ -0,0 +1,37 @@ +/** + * Copyright (c) 2017 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +"use strict"; + +//------------------------------------------------------------------------------------------------------- +// Helper Functions +//------------------------------------------------------------------------------------------------------- + +module.exports = { + auth: require('./auth'), + accounts: require('./accounts'), + devices: require('./devices'), + users: require('./users'), + control: require('./control'), + alerts: require('./alerts'), + data: require('./data'), + connector: require('./connector'), + rules: require('./rules'), + cmpcatalog: require('./cmpcatalog'), + invitation: require('./invitation'), + mail: require('./mail') +}; diff --git a/tests/lib/helpers/invitation.js b/tests/lib/helpers/invitation.js new file mode 100644 index 00000000..61b2bdb0 --- /dev/null +++ b/tests/lib/helpers/invitation.js @@ -0,0 +1,139 @@ +/** + * Copyright (c) 2017 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +"use strict"; + +var chai = require('chai'); +var assert = chai.assert; + +var config = require("../../test-config.json"); +var oispSdk = require("@open-iot-service-platform/oisp-sdk-js"); +var api = oispSdk(config).api.rest; + + +function createInvitation(userToken, accountId, receiveremail, cb){ + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + body: { + email: receiveremail + } + }; + + api.invitation.createInvitation(data, function(err, response) { + assert.notEqual(response, null, 'response is null') + if (err) { + cb(err) + } else { + cb(null, response) + } + }); +} + + +function getAllInvitations(userToken, accountId, cb){ + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId + }; + + api.invitation.getAllInvitations(data, function(err, response) { + assert.notEqual(response, null, 'response is null') + if (err) { + cb(err) + } else { + cb(null, response) + } + }); +} + +function getInvitations(userToken, accountId, email, cb){ + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + email: email + }; + + api.invitation.getInvitations(data, function(err, response) { + if (err) { + cb(err) + } else { + cb(null, response) + } + }); +} + +function acceptInvitation(userToken, accountId, inviteId, cb){ + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + inviteId: inviteId, + body: { + accept: true + } + }; + + api.invitation.acceptInvitation(data, function(err, response) { + if (err) { + cb(err) + } else { + cb(null, response) + } + }); +} + +function deleteInvitations(userToken, accountId, receiveremail, cb){ + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + email: receiveremail + }; + + api.invitation.deleteInvitations(data, function(err, response) { + if (err) { + cb(err) + } else { + cb(null, response) + } + }); +} + +module.exports={ + createInvitation: createInvitation, + getAllInvitations: getAllInvitations, + getInvitations: getInvitations, + acceptInvitation: acceptInvitation, + deleteInvitations: deleteInvitations +} diff --git a/tests/lib/helpers/mail.js b/tests/lib/helpers/mail.js new file mode 100644 index 00000000..0ebf8940 --- /dev/null +++ b/tests/lib/helpers/mail.js @@ -0,0 +1,81 @@ +/** + * Copyright (c) 2017 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +"use strict"; + +//------------------------------------------------------------------------------------------------------- +// Helper Functions +//------------------------------------------------------------------------------------------------------- + +var Imap = require('imap'); + +function getEmailMessage(user, password, host, port, num, cb) { + if (!cb) { + throw "Callback required"; + } + var imap = new Imap({ + user: user, + password: password, + host: host, + port: port, + tls: true, + tlsOptions: { rejectUnauthorized: false } + }); + + imap.once('ready', function() { + imap.openBox('INBOX', true, function(err, box) { + if ( !err ) { + var f = imap.seq.fetch(num, { + bodies: ['HEADER.FIELDS (TO)', '1'], + struct: true + }); + + f.on('message', function(msg, seqno) { + var buffer = ''; + msg.on('body', function(stream, info) { + + stream.on('data', function(chunk) { + buffer += chunk.toString('utf8'); + }); + }); + + msg.once('end', function() { + buffer = buffer.replace("<","<"); + buffer = buffer.replace(">",">"); + cb(null, buffer); + imap.closeBox(() => { + imap.destroy(); + imap.end(); + }); + }); + }); + } + else { + cb(err); + } + }); + }); + + imap.once('error', function(err) { + cb(err); + }); + + imap.connect(); + +} + +module.exports ={ + getEmailMessage: getEmailMessage +} \ No newline at end of file diff --git a/tests/lib/helpers/rules.js b/tests/lib/helpers/rules.js new file mode 100644 index 00000000..79445a76 --- /dev/null +++ b/tests/lib/helpers/rules.js @@ -0,0 +1,346 @@ +/** + * Copyright (c) 2017 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +"use strict"; + +//------------------------------------------------------------------------------------------------------- +// Helper Functions +//------------------------------------------------------------------------------------------------------- + + +var uuid = require('uuid/v4'); + +var chai = require('chai'); +var assert = chai.assert; + +var config = require("../../test-config.json"); +var oispSdk = require("@open-iot-service-platform/oisp-sdk-js"); +var api = oispSdk(config).api.rest; + + +function getRules(userToken, accountId, cb) { + if (!cb) { + throw "Callback required"; + } + var data = { + userToken: userToken, + accountId: accountId + }; + + api.rules.getRules(data, function(err, response) { + if (err) { + cb(err) + } else { + cb(null,response) + } + }); +} + +function getRuleDetails(userToken, accountId, ruleId, cb) { + if (!cb) { + throw "Callback required"; + } + var data = { + userToken: userToken, + accountId: accountId, + ruleId: ruleId + }; + + api.rules.getRuleDetails(data, function(err, response) { + if (err) { + cb(err) + } else { + if ( response.id == ruleId ) { + { + cb(null, response.synchronizationStatus == "Sync" ? 
true : false) + } + } + else { + cb("rule "+ruleId + " not found "); + } + } + }); +} + +function createRule(ruleConfig, userToken, accountId, deviceId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + + body: { + name: ruleConfig.name, + description: "OISP testing rule", + priority: "Medium", + type: "Regular", + status: "Active", + resetType: "Automatic", + synchronizationStatus: "NotSync", + + actions: ruleConfig.actions, + + population: { + ids: [deviceId], + attributes: null, + tags: null + }, + + conditions: { + operator: "OR", + values: [{ + component: { + dataType: "Number", + name: ruleConfig.conditionComponent, + cid: ruleConfig.cid + }, + type: "basic", + values: [ruleConfig.basicConditionValue.toString()], + operator: ruleConfig.basicConditionOperator + }] + } + } + }; + + + api.rules.createRule(data, function(err, response) { + if (err) { + cb(err) + } else { + var ruleId = response.id; + var syncInterval = setInterval( function(id) { + getRuleDetails(userToken, accountId, ruleId, function(err, status) { + if (err) { + clearInterval(syncInterval); + cb(err) + } + else { + if ( status == true ) { + clearInterval(syncInterval); + cb(null, ruleId) + } + } + }) + }, 500) + + } + }); + +} + +function updateRule(ruleConfig, userToken, accountId, ruleId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + ruleId: ruleId, + body: { + name: ruleConfig.name, + description: "OISP testing rule new", + priority: "Medium", + type: "Regular", + status: "Active", + resetType: "Automatic", + synchronizationStatus: ruleConfig.synchronizationStatus, + + actions: [{ + type: "actuation", + target: [ + ruleConfig.actuationCmd + ] + }], + + + population: { + ids: ['00-11-22-33-44-55'], + attributes: null, + tags: null + }, + + conditions: { + operator: "OR", + values: [{ + component: { + dataType: "Number", + name: ruleConfig.conditionComponent, + cid: ruleConfig.cid + }, + type: "basic", + values: [ruleConfig.basicConditionValue.toString()], + operator: ruleConfig.basicConditionOperator + }] + } + } + }; + + + api.rules.updateRule(data, function(err, response) { + if (err) { + cb(err) + } else { + cb(null,response) + } + }); +} + + +function updateRuleStatus(userToken, accountId, ruleId, status, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + ruleId: ruleId, + body:{ + status:status + } + }; + + api.rules.updateRuleStatus(data, function(err, response) { + if (err) { + cb(err, null); + } + else { + cb(null,response) + } + }); +} + + +function createDraftRule (draftname, userToken, accountId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + + body: { + priority: null, + type: null, + resetType: null, + name: draftname, + synchronizationStatus: "NotSync", + + actions:[{ + type:"mail", + target:[] + }], + population:{ + name:null, + ids:[], + tags:[], + attributes:null + }, + conditions: { + operator: null, + values: [] + } + } + }; + + api.rules.createDraftRule(data, function(err, response) { + if (err) { + cb(err) + } else { + cb(null, response) + } + }); + +} + + +function cloneRule (userToken, accountId, ruleId , cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + ruleId: ruleId + }; + + api.rules.cloneRule(data, function(err, response) { + if (err) { + 
cb(err) + } else { + cb(null, response) + } + }); + +} + +function deleteDraftRule (userToken, accountId,ruleId , cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + ruleId: ruleId + }; + + api.rules.deleteDraftRule(data, function(err, response) { + if (err) { + cb(err) + } else { + cb(null, response) + } + }); + +} + +function deleteRule (userToken, accountId,ruleId , cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + ruleId: ruleId + }; + + api.rules.deleteRule(data, function(err, response) { + if (err) { + cb(err) + } else { + cb(null, response) + } + }); + +} + +module.exports={ + createRule:createRule, + getRules: getRules, + getRuleDetails: getRuleDetails, + updateRule: updateRule, + updateRuleStatus: updateRuleStatus, + createDraftRule: createDraftRule, + cloneRule: cloneRule, + deleteDraftRule: deleteDraftRule, + deleteRule: deleteRule +} diff --git a/tests/lib/helpers/users.js b/tests/lib/helpers/users.js new file mode 100644 index 00000000..638fd23c --- /dev/null +++ b/tests/lib/helpers/users.js @@ -0,0 +1,243 @@ +/** + * Copyright (c) 2017 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +"use strict"; + +var uuid = require('uuid/v4'); + +var chai = require('chai'); +var assert = chai.assert; + +var config = require("../../test-config.json"); +var oispSdk = require("@open-iot-service-platform/oisp-sdk-js"); +var api = oispSdk(config).api.rest; + + +function addUser(userToken, email, password, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + body: { + email: email, + password: password + } + }; + + api.users.addUser(data, function(err, response) { + if (!err) { + assert.notEqual(response, null, 'response is null') + cb(null,response); + } + else { + cb(err); + } + }); +} + +function getUserInfo(userToken, userId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + userId: userId + }; + + api.users.getUserInfo(data, function(err, response) { + if (!err) { + assert.notEqual(response, null, 'response is null') + cb(null,response); + } + else { + cb(err); + } + }); +} + +function updateUserInfo(userToken, userId, userInfo, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + userId: userId, + body: userInfo + }; + + api.users.updateUserInfo(data, function(err, response) { + if (!err) { + assert.notEqual(response, null, 'response is null') + cb(null,response); + } + else { + cb(err); + } + }); +} + +function requestUserPasswordChange(username, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + body: { + "email":username + } + }; + + api.users.requestUserPasswordChange(data, function(err, response) { + if (!err) { + assert.notEqual(response, null, 'response is null') + cb(null,response); + } + else { + cb(err); + } + }); +} + +function updateUserPassword(token, password, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + body:{ + token: token, + password: password + } + }; + + api.users.updateUserPassword(data, function(err, response) { + if (!err) { + assert.notEqual(response, null, 'response is null') + cb(null,response); + } + else { + cb(err); + } + }); +} + +function changeUserPassword(userToken, username, oldPasswd, newPasswd, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + username: username, + userToken: userToken, + body: { + "currentpwd": oldPasswd, + "password": newPasswd + } + }; + + api.users.changeUserPassword(data, function(err, response) { + if (!err) { + assert.notEqual(response, null, 'response is null') + cb(null,response); + } + else { + cb(err); + } + }); +} + +function requestUserActivation(username, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + body:{ + email: username, + } + }; + + api.users.requestUserActivation(data, function(err, response) { + if (!err) { + assert.notEqual(response, null, 'response is null') + cb(null,response); + } + else { + cb(err); + } + }); +} + +function activateUser(token, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + body:{ + token: token + } + }; + + api.users.activateUser(data, function(err, response) { + if (!err) { + assert.notEqual(response, null, 'response is null') + cb(null,response); + } + else { + cb(err); + } + }); +} + + +function deleteUser(userToken, userId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + deleteUserId: userId + }; + + api.users.deleteUser(data, function(err, response) { + if (!err) { + assert.notEqual(response, null, 'response is null') + cb(null,response); + } + else { + cb(err); + } + }); +} + 
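A minimal sketch (outside the diff) of how the user helpers above combine with the IMAP helper in tests/lib/helpers/mail.js for the e-mail based activation flow exercised by tests/oisp-tests.js. The require path and the activateReceiver wrapper are illustrative assumptions only; the getEmailMessage signature, the token regex and the substring(2) trim mirror the test code in this series, and the -1 "fetch newest message" argument relies on the num <= 0 handling added to mail.js in a later patch below.

    var helpers = require('./lib/helpers');   // assumed require path, as used from tests/oisp-tests.js

    function activateReceiver(userToken, email, password, host, port, done) {
        helpers.users.addUser(userToken, email, password, function(err) {
            if (err) { return done(err); }
            // -1 asks getEmailMessage for the newest INBOX message (num <= 0 handling, later patch)
            helpers.mail.getEmailMessage(email, password, host, port, -1, function(err, message) {
                if (err) { return done(err); }
                // activation mails carry a "token=..." link; extract it the same way the test does
                var raw = /token=\w*?\r/.exec(message.toString())[0];
                var activationToken = raw.split("token=")[1].replace(/\r/, '').substring(2);
                helpers.users.activateUser(activationToken, function(err) { done(err); });
            });
        });
    }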
+module.exports={ + addUser: addUser, + getUserInfo: getUserInfo, + updateUserInfo: updateUserInfo, + requestUserPasswordChange: requestUserPasswordChange, + updateUserPassword: updateUserPassword, + changeUserPassword: changeUserPassword, + requestUserActivation: requestUserActivation, + activateUser: activateUser, + deleteUser: deleteUser +} \ No newline at end of file diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index 9e6c9f7b..52430692 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -30,16 +30,17 @@ var colors = require('colors'); var exec = require('child_process').exec; var accountName = "oisp-tests"; -var deviceName = "oisp-tests-device" +var deviceName = "oisp-tests-device"; -var componentName = "temperature-sensor" -var componentType = "temperature.v1.0" +var componentName = "temperature-sensor"; +var componentType = "temperature.v1.0"; -var actuatorName = "powerswitch-actuator" -var actuatorType = "powerswitch.v1.0" +var actuatorName = "powerswitch-actuator"; +var actuatorType = "powerswitch.v1.0"; + +var switchOnCmdName = "switch-on"; +var switchOffCmdName = "switch-off"; -var switchOnCmdName = "switch-on" -var switchOffCmdName = "switch-off" var emailRecipient = "test.receiver@streammyiot.com" @@ -48,6 +49,9 @@ var imap_password = process.env.IMAP_PASSWORD; var imap_host = process.env.IMAP_HOST; var imap_port = process.env.IMAP_PORT; +var recipientEmail = imap_username; +var emailNum = 1; + var rules = []; rules[switchOnCmdName] = { @@ -62,7 +66,7 @@ rules[switchOnCmdName] = { }, { type: "mail", - target: [ emailRecipient ] + target: [ recipientEmail ] } ], }; @@ -79,7 +83,7 @@ rules[switchOffCmdName] = { }, { type: "mail", - target: [ emailRecipient ] + target: [ recipientEmail ] } ], }; @@ -88,13 +92,18 @@ rules[switchOffCmdName] = { // Tests //------------------------------------------------------------------------------------------------------- var userToken; +var receiverToken; +var receiveruserId; +var receiveraccountId; +var userId; var accountId; var deviceId; var deviceToken; var componentId; var actuatorId; -var ruleId; -var componentParamName; +var rulelist; +var alertlist; +var componentParamName; var firstObservationTime; var temperatureValues = [{ @@ -164,8 +173,9 @@ describe("Waiting for OISP services to be ready ...\n".bold, function() { deviceToken = null; componentId = null; actuatorId = null; - ruleId = null; - componentParamName = "on"; + rulelist = null; + alertlist = null; + componentParamName = "LED"; firstObservationTime = null; done(); @@ -233,33 +243,147 @@ describe("Waiting for OISP services to be ready ...\n".bold, function() { }).timeout(2*60*1000); }) -describe("Creating account and device ...\n".bold, function() { - +describe("get authorization and manage user ...\n".bold, function() { + it('Shall authenticate', function(done) { var username = process.env.USERNAME; var password = process.env.PASSWORD; - + assert.isNotEmpty(username, "no username provided"); assert.isNotEmpty(password, "no password provided"); - helpers.login(username, password, function(err, token) { + helpers.auth.login(username, password, function(err, token) { if (err) { done(new Error("Cannot authenticate: " + err)); } else { userToken = token; done(); + } + }) + }) + + it('Shall get token info', function (done) { + helpers.auth.tokenInfo(userToken, function (err, response) { + if (err) { + done(new Error("Cannot get token info: " + err)); + } else { + assert.equal(response.header.typ, 'JWT', 'response type error' ); + if (response.payload.sub == null){ 
+ done(new Error("get null user id")); + } + else { + userId = response.payload.sub + } + done(); + } + }) + }) + + it('Shall get user information', function(done) { + helpers.users.getUserInfo(userToken, userId, function(err, response) { + if (err) { + done(new Error("Cannot get user information : " + err)); + } else { + assert.isString(response.id) + done(); + } + }) + }) + + it('Shall update user information', function(done) { + var newuserInfo = { + attributes:{ + "phone":"12366666666", + "another_attribute":"another_value", + "new":"next_string_value" + } + } + + helpers.users.updateUserInfo(userToken, userId, newuserInfo, function(err, response) { + if (err) { + done(new Error("Cannot update user information : " + err)); + } else { + assert.equal(response.status, 'OK', 'status error') + done(); } }) }) +}) + +describe("Creating account and device ...\n".bold, function() { + + var accountInfo; it('Shall create account', function(done) { assert.notEqual(userToken, null, "Invalid user token") - - helpers.createAccount(accountName, userToken, function(err, id) { + helpers.accounts.createAccount(accountName, userToken, function(err, response) { if (err) { done(new Error("Cannot create account: " + err)); } else { - accountId = id; + assert.equal(response.name, accountName, "accounts name is wrong"); + accountId = response.id; + done(); + } + }) + }) + + it('Shall get account info', function (done) { + helpers.accounts.getAccountInfo(accountId, userToken, function (err, response) { + if (err) { + done(new Error("Cannot get account info: " + err)); + } else { + accountInfo = response; + done(); + } + }) + }) + + it('Shall update an account', function (done) { + + accountInfo.attributes = { + "phone":"123456789", + "another_attribute":"another_value", + "new":"next_string_value" + } + + helpers.accounts.updateAccount(accountId, userToken, accountInfo, function (err, response) { + if (err) { + done(new Error("Cannot update account: " + err)); + } else { + assert.deepEqual(response.attributes, accountInfo.attributes, 'new attributes not being updated'); + done(); + } + }) + }) + + it('Shall get account activation code', function (done) { + + helpers.accounts.getAccountActivationCode(accountId, userToken, function (err, response) { + if (err) { + done(new Error("Cannot get account activation code: " + err)); + } else { + done(); + } + }) + }) + + it('Shall refresh account activation code', function (done) { + + helpers.accounts.refreshAccountActivationCode(accountId, userToken, function (err, response) { + if (err) { + done(new Error("Cannot refresh account activation code: " + err)); + } else { + done(); + } + }) + }) + + it('Shall list all users for account', function (done) { + + helpers.accounts.getAccountUsers(accountId, userToken, function (err, response) { + if (err) { + done(new Error("Cannot list users for account: " + err)); + } else { done(); } }) @@ -268,35 +392,139 @@ describe("Creating account and device ...\n".bold, function() { it('Shall create device', function(done) { assert.notEqual(accountId, null, "Invalid account id") - helpers.createDevice(deviceName, deviceId, userToken, accountId, function(err, id) { + helpers.devices.createDevice(deviceName, deviceId, userToken, accountId, function(err, response) { if (err) { done(new Error("Cannot create device: " + err)); } else { + assert.equal(response.deviceId, deviceId, 'incorrect device id') + assert.equal(response.name, deviceName, 'incorrect device name') + done(); + } + }) + }) + + it('Shall get a list of all devices', 
function(done) { + + helpers.devices.getDevices(userToken, accountId, function(err, response) { + if (err) { + done(new Error("Cannot get list of devices: " + err)); + } else { + assert.equal(response[0].deviceId, deviceId, 'incorrect device id') + done(); + } + }) + }) + + it('Shall update info of a device', function(done) { + var deviceInfo = { + gatewayId: deviceId, + name: deviceName, + loc: [ 45.12345, -130.654321, 121.1], + tags: ["tag001", "tag002"], + attributes: { + vendor: "intel", + platform: "x64", + os: "linux" + } + } + helpers.devices.updateDeviceDetails(userToken, accountId, deviceId, deviceInfo, function(err, response) { + if (err) { + done(new Error("Cannot update device info: " + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.deepEqual(response.attributes, deviceInfo.attributes, 'device info is not updated') + done(); + } + }) + }) + + it('Shall list all tags for device', function(done) { + + helpers.devices.getDeviceTags(userToken, accountId, function(err, response) { + if (err) { + done(new Error("Cannot list all tags for device " + err)); + } else { + assert.equal(response.length, 2, 'error tag numbers') + done(); + } + }) + }) + + it('Shall list all attributes for device', function(done) { + var attributes = { + vendor: ["intel"], + platform: ["x64"], + os: ["linux"] + } + helpers.devices.getDeviceAttributes(userToken, accountId, function(err, response) { + if (err) { + done(new Error("Cannot list all attributes for device " + err)); + } else { + assert.deepEqual(response, attributes, 'get wrong device attributes') done(); } }) }) + it('Shall count devices based on filter', function(done) { + + helpers.devices.countDevices(userToken, accountId, function(err, response) { + if (err) { + done(new Error("Cannot count devices " + err)); + } else { + assert.equal(response.device.total, 1, 'count devices wrong') + done(); + } + }) + }) + + it('Shall search devices based on filter', function(done) { + + helpers.devices.searchDevices(userToken, accountId, function(err, response) { + if (err) { + done(new Error("Cannot search devices " + err)); + } else { + assert.equal(response[0].deviceId, deviceId, 'search wrong device') + done(); + } + }) + }) + it('Shall activate device', function(done) { assert.notEqual(deviceId, null, "Invalid device id") - helpers.activateDevice(userToken, accountId, deviceId, function(err, token) { + helpers.devices.activateDevice(userToken, accountId, deviceId, function(err, response) { if (err) { done(new Error("Cannot activate device " + err)); } else { - deviceToken = token; + assert.isString(response.deviceToken, 'device token is not string') + deviceToken = response.deviceToken; + done(); + } + }) + }) + + it('Shall get detail of one device', function(done) { + + helpers.devices.getDeviceDetails(userToken, accountId, deviceId, function(err, response) { + if (err) { + done(new Error("Cannot get detail of device: " + err)); + } else { + assert.equal(response.deviceId, deviceId, 'incorrect device id') + assert.deepEqual(response.attributes, + {"vendor": "intel", "platform": "x64", "os": "linux"}, + 'incorrect device attributes' ) done(); } }) }) }) -describe("Creating components and rules ... \n".bold, function() { +describe("Creating and getting components ... 
\n".bold, function() { - it('Shall create component', function(done) { - assert.notEqual(deviceToken, null, "Invalid device token") + it('Shall add device a component', function(done) { - helpers.createComponent(componentName, componentType, userToken, accountId, deviceId, false, function(err, id) { + helpers.devices.addDeviceComponent(componentName, componentType, deviceToken, accountId, deviceId, function(err, id) { if (err) { done(new Error("Cannot create component: " + err)); } else { @@ -306,11 +534,9 @@ describe("Creating components and rules ... \n".bold, function() { }) }).timeout(10000); + it('Shall add device an actuator', function(done) { - it('Shall create actuator', function(done) { - assert.notEqual(deviceToken, null, "Invalid device token") - - helpers.createComponent(actuatorName, actuatorType, userToken, accountId, deviceId, false, function(err, id) { + helpers.devices.addDeviceComponent(actuatorName, actuatorType, deviceToken, accountId, deviceId, function(err, id) { if (err) { done(new Error("Cannot create actuator: " + err)); } else { @@ -323,10 +549,11 @@ describe("Creating components and rules ... \n".bold, function() { it('Shall create switch-on actuation command', function(done) { assert.notEqual(actuatorId, null, "Invalid actuator id") - helpers.createCommand(switchOnCmdName, componentParamName, 1, userToken, accountId, deviceId, actuatorId, function(err) { + helpers.control.saveComplexCommand(switchOnCmdName, componentParamName, 1, userToken, accountId, deviceId, actuatorId, function(err,response) { if (err) { done(new Error("Cannot create switch-on command: " + err)); } else { + assert.equal(response.status, 'OK', 'get error response status') done(); } }) @@ -336,22 +563,107 @@ describe("Creating components and rules ... \n".bold, function() { it('Shall create switch-off actuation command', function(done) { assert.notEqual(actuatorId, null, "Invalid actuator id") - helpers.createCommand(switchOffCmdName, componentParamName, 0, userToken, accountId, deviceId, actuatorId, function(err) { + helpers.control.saveComplexCommand(switchOffCmdName, componentParamName, 0, userToken, accountId, deviceId, actuatorId, function(err,response) { if (err) { done(new Error("Cannot create switch-off command: " + err)); } else { + assert.equal(response.status, 'OK', 'get error response status') + done(); + } + }) + + }).timeout(10000); + + it('Shall send an actuation', function(done){ + + helpers.control.sendActuationCommand(componentParamName, 1, userToken, accountId, actuatorId, deviceId, function(err,response) { + if (err) { + done(new Error("Cannot send an actuation: " + err)); + } else { + assert.equal(response.status, 'OK', 'cannot send an actuation') + done(); + } + }) + + }) + + it('Shall get list of actuations', function(done) { + + var parameters = { + from: 0, + to: undefined + } + + helpers.control.pullActuations(parameters, deviceToken, accountId, deviceId, actuatorId, function(err, response) { + if (err) { + done(new Error("get list of actuations: " + err)); + } else { + assert.equal(response[0].componentId, actuatorId, 'pull error actuations') + done(); + } + }) + + }).timeout(20000); + +}); + +describe("Getting components catalog ... 
\n".bold, function() { + + it('Shall create a new custom Component Type', function(done) { + helpers.cmpcatalog.createCatalog(userToken, accountId, function(err, response) { + if (err) { + done(new Error("Cannot create component: " + err)); + } else { + assert.equal(response.id, 'speed.v1.0', 'cannot create new component type') done(); } }) + }).timeout(10000); + it('Shall list all component types for account', function(done) { + helpers.cmpcatalog.getCatalog(userToken, accountId, function(err, response) { + if (err) { + done(new Error("Cannot list component: " + err)); + } else { + assert.equal(response.length, 4, 'get wrong number of component types') + done(); + } + }) }).timeout(10000); + it('Shall get component type details', function(done) { + helpers.cmpcatalog.getCatalogDetail(userToken, accountId, 'speed.v1.0', function(err, response) { + if (err) { + done(new Error("Cannot get component type details " + err)); + } else { + assert.equal(response.id, 'speed.v1.0', 'cannot get speed component') + assert.equal(response.max, 500) + done(); + } + }) + }).timeout(10000); + + it('Shall update a component type', function(done) { + var newmin = 10; + var newmax = 1000; + helpers.cmpcatalog.updateCatalog(userToken, accountId, 'speed.v1.0', newmin, newmax, function(err, response) { + if (err) { + done(new Error("Cannot get component type details " + err)); + } else { + assert.equal(response.id, 'speed.v1.1', 'cannot update speed component to v1.1') + assert.equal(response.max, 1000) + done(); + } + }) + }).timeout(10000); +}) + +describe("Creating rules ... \n".bold, function() { it('Shall create switch-on rule', function(done) { - assert.notEqual(deviceToken, null, "Invalid device token") rules[switchOnCmdName].cid = componentId; - helpers.createRule(rules[switchOnCmdName], userToken, accountId, deviceId, function(err, id) { + helpers.rules.createRule(rules[switchOnCmdName], userToken, accountId, deviceId, function(err, id) { if (err) { done(new Error("Cannot create switch-on rule: " + err)); } else { @@ -363,11 +675,10 @@ describe("Creating components and rules ... \n".bold, function() { }).timeout(20000); it('Shall create switch-off rule', function(done) { - assert.notEqual(deviceToken, null, "Invalid device token") rules[switchOffCmdName].cid = componentId; - helpers.createRule(rules[switchOffCmdName], userToken, accountId, deviceId, function(err, id) { + helpers.rules.createRule(rules[switchOffCmdName], userToken, accountId, deviceId, function(err, id) { if (err) { done(new Error("Cannot create switch-off rule: " + err)); } else { @@ -377,7 +688,20 @@ describe("Creating components and rules ... 
\n".bold, function() { }) }).timeout(20000); -}) + + it('Shall get all rules', function(done) { + helpers.rules.getRules(userToken, accountId, function(err, response) { + if (err) { + done(new Error("Cannot get rules " + err)); + } else { + assert.equal(response[0].name, 'oisp-tests-rule-high-temp', 'rule name is wrong') + rulelist = response + done(); + } + }) + + }).timeout(20000); +}); describe("Sending observations and checking rules ...\n".bold, function() { @@ -389,7 +713,6 @@ describe("Sending observations and checking rules ...\n".bold, function() { var index = 0; var nbActuations = 0; - var emailNum = 1; process.stdout.write(" "); @@ -412,7 +735,7 @@ describe("Sending observations and checking rules ...\n".bold, function() { }; - helpers.wsConnect(proxyConnector, deviceToken, deviceId, function(message) { + helpers.connector.wsConnect(proxyConnector, deviceToken, deviceId, function(message) { --nbActuations; var expectedActuationValue = temperatureValues[index].expectedActuation.toString(); @@ -426,51 +749,27 @@ describe("Sending observations and checking rules ...\n".bold, function() { if(paramValue == expectedActuationValue) { - helpers.getEmailMessage(imap_username, imap_password, imap_host, imap_port, emailNum, function(err, message) { + helpers.mail.getEmailMessage(imap_username, imap_password, imap_host, imap_port, emailNum, function(err, message) { if (!err) { - var regExps = [ - { - value: /^To:.*/, - expected: emailRecipient, - found: false - }, - { - value: /^- Device:.*/, - expected: deviceId, - found: false - }, - { - value: /^- Reason:.*/, - expected: temperatureValues[index].expectedEmailReason, - found: false - } - ]; var lines = message.toString().split("\n"); var i; - for(i=0; i 25', 'get error alert') + done(); + } + }) + }) + + it('Shall update alert status', function(done){ + helpers.alerts.updateAlertStatus(userToken, accountId, alertlist[1].alertId, 'Open', function(err, response) { + if (err) { + done(new Error("Cannot update alert status " + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.equal(response.status, 'Open', 'wrong alert status') + done(); + } + }) + }) + + it('Shall clear alert infomation', function(done){ + helpers.alerts.closeAlert(userToken, accountId, alertlist[2].alertId, function(err, response) { + if (err) { + done(new Error("Cannot clear alert infomation " + err)); + } else { + done(); + } + }) + }) + +}); + +describe("update rules and create draft rules ... 
\n".bold, function(){ + + var cloneruleId; + + it('Shall clone a rule', function(done){ + var rulename_clone = rulelist[1].name + ' - cloned'; + + helpers.rules.cloneRule (userToken, accountId, rulelist[1].id, function(err, response) { + if (err) { + done(new Error("cannot clone a rule " + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.equal(response.name, rulename_clone, 'clone error rule') + cloneruleId = response.id + done(); + } + }) + }) + + it('Shall update a rule', function(done) { + + var newrule = { + name: "oisp-tests-rule-high-temp-new", + synchronizationStatus: "NotSync", + conditionComponent: componentName, + basicConditionOperator: ">", + basicConditionValue: "28", + actuationCmd: switchOffCmdName, + cid: componentId + } + + helpers.rules.updateRule(newrule, userToken, accountId, cloneruleId, function(err, response) { + if (err) { + done(new Error("Cannot update rules " + err)); + } else { + assert.equal(response.name, newrule.name, 'update rule wrong') + done(); + } + }) + + }).timeout(20000); + + it('Shall update rule status', function(done){ + helpers.rules.updateRuleStatus (userToken, accountId, rulelist[1].id, 'Archived', function(err, response) { + if (err) { + done(new Error("Cannot update rule status " + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.equal(response.status, 'Archived', 'wrong rule status') + done(); + } + }) + }) + + it('Shall create a draft rule', function(done){ + helpers.rules.createDraftRule ('Draftrule', userToken, accountId,function(err, response) { + if (err) { + done(new Error("cannot create a draft rule " + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.equal(response.name, 'Draftrule', 'wrong draft rule name') + done(); + } + }) + }) +}) + +describe("Adding user and posting email ...\n".bold, function() { + + it("Shall add a new user and post email", function(done) { + assert.isNotEmpty(imap_username, "no email provided"); + assert.isNotEmpty(imap_password, "no password provided"); + helpers.users.addUser(userToken, imap_username, imap_password ,function(err, response) { + if (err) { + done(new Error("Cannot create new user: " + err)); + } else { + assert.equal(response.email, imap_username, 'add wrong user') + assert.equal(response.type, 'user', 'response type wrong') + done(); + } + }) + }) + + it("Shall activate user with token", function(done) { + process.stdout.write(" receiving email for activation token..."); + helpers.mail.getEmailMessage(imap_username, imap_password, imap_host, imap_port, emailNum, function(err, message) { + if (!err) { + var regexp = /token=\w*?\r/ + var activationline = regexp.exec(message.toString()); + var rawactivation = activationline[0].split("token=")[1].toString(); + var activationToken = rawactivation.replace(/\r/, '') + activationToken = activationToken.substring(2) + + if ( activationToken === null) { + done(new Error("Wrong email " + message )) + } + else { + assert.isString(activationToken,'activationToken is not string') + emailNum++; + + helpers.users.activateUser(activationToken, function(err, response) { + if (err) { + done(new Error("Cannot activate user: " + err)); + } else { + assert.equal(response.status, 'OK', 'cannot activate user') + + helpers.auth.login(imap_username, imap_password, function(err, token) { + if (err) { + done(new Error("Cannot authenticate receiver: " + err)); + } else { + receiverToken = token; + done(); + } + }) + } + }); + } + } + else { + done(new Error("Wrong email " + 
err )) + } + }) + }).timeout( 60 * 1000); + + it('Shall create receiver account', function(done) { + assert.notEqual(receiverToken, null, "Invalid user token") + helpers.accounts.createAccount('receiver', receiverToken, function(err, response) { + if (err) { + done(new Error("Cannot create account: " + err)); + } else { + assert.equal(response.name, 'receiver', "accounts name is wrong"); + receiveraccountId = response.id; + done(); + } + }) + }) }); + +describe("Invite receiver ...\n".bold, function() { + + var inviteId = null; + + it('Shall create invitation', function(done){ + // a mail will be sent to receiver + + helpers.invitation.createInvitation(userToken, accountId, imap_username, function(err, response) { + if (err) { + done(new Error("Cannot create invitation: " + err)); + } else { + emailNum++; + assert.equal(response.email, imap_username, 'send invite to wrong name') + done(); + } + }) + }) + + + it('Shall get all invitations', function(done){ + + helpers.invitation.getAllInvitations(userToken, accountId, function(err, response) { + if (err) { + done(new Error("Cannot get invitation: " + err)); + } else { + assert.equal(response[0], imap_username, 'send invite to wrong name') + done(); + } + }) + }) + + it('Shall delete invitation and send again', function(done){ + helpers.invitation.deleteInvitations(userToken, accountId, imap_username, function(err, response) { + if (err) { + done(new Error("Cannot delete invitation: " + err)); + } else { + helpers.invitation.createInvitation(userToken, accountId, imap_username, function(err, response) { + // when send invitation, the receiver will receive an email said he should login to accept the invitation + if (err) { + done(new Error("Cannot create invitation: " + err)); + } else { + emailNum++; + assert.equal(response.email, imap_username, 'send invite to wrong name') + done(); + } + }) + } + }) + }) + + it('Shall get specific invitations', function(done){ + process.stdout.write(" receiving email for invitations..."); + var getInvitation = function(cb){ + helpers.auth.login(imap_username, imap_password, function(err, token) { + if (err) { + done(new Error("Cannot authenticate: " + err)); + } else { + receiverToken = token; + helpers.invitation.getInvitations(receiverToken, receiveraccountId, imap_username, function(err, response) { + if (err) { + cb(err, null); + } else { + if (response != null){ + cb(null, response) + } else { + process.stdout.write("."); + setTimeout(function(){ + getInvitation(cb); + }, 500); + } + } + }) + } + }) + }; + getInvitation(function(err, response){ + assert.equal(err, null, "get invitation error"); + inviteId = response[0]._id; + done(); + }); + }).timeout(2 * 60 * 1000); + + it('Shall accept specific invitations', function(done){ + helpers.invitation.acceptInvitation(receiverToken, accountId, inviteId, function(err, response) { + if (err) { + done(new Error('cannot accept invitetion :' + err)); + } else { + assert.equal(response.accountName, accountName, 'accept wrong invitation') + done(); + } + }) + }) + + it('Shall request activation', function(done) { + var username = process.env.USERNAME; + + helpers.users.requestUserActivation(username, function(err, response) { + if (err) { + done(new Error('cannot request activation:' + err)); + } else { + assert.equal(response.status, 'OK') + done(); + } + }) + }) + + it('Shall get id of receiver and change privilege', function (done) { + helpers.auth.tokenInfo(receiverToken, function (err, response) { + if (err) { + done(new Error("Cannot get token info: " + 
err)); + } else { + receiveruserId = response.payload.sub + helpers.accounts.changeAccountUser(accountId, userToken, receiveruserId, function (err, response) { + if (err) { + done(new Error("Cannot change another user privilege to your account: " + err)); + } else { + assert.equal(response.status, 'OK') + done(); + } + }) + + } + }) + }) + + it('Shall list all users for account', function (done) { + + helpers.accounts.getAccountUsers(accountId, userToken, function (err, response) { + if (err) { + done(new Error("Cannot list users for account: " + err)); + } else { + assert.equal(response.length, 2, 'error account numbers') + done(); + } + }) + }) +}) + +describe("change password and delete receiver ... \n".bold, function(){ + + it('Shall request change receiver password', function(done) { + helpers.users.requestUserPasswordChange(imap_username, function(err, response) { + if (err) { + done(new Error("Cannot request change password : " + err)); + } else { + assert.equal(response.status, 'OK', 'status error') + done(); + } + }) + }) + + it('Shall update receiver password', function(done) { + process.stdout.write(" receiving email for new password token..."); + helpers.mail.getEmailMessage(imap_username, imap_password, imap_host, imap_port, emailNum, function(err, message) { + var regexp = /token=\w*?\r/ + var activationline = regexp.exec(message.toString()); + var rawactivation = activationline[0].split("token=")[1].toString(); + var activationToken = rawactivation.replace(/\r/, '') + activationToken = activationToken.substring(2) + + if ( activationToken === null) { + done(new Error("Wrong email " + message )) + } + else { + assert.isString(activationToken,'activationToken is not string') + emailNum++; + + imap_password = 'Receiver12345' + + helpers.users.updateUserPassword(activationToken, imap_password, function(err, response) { + if (err) { + done(new Error("Cannot update receiver password : " + err)); + } else { + assert.equal(response.status, 'OK', 'status error') + done(); + } + }) + } + }); + }).timeout(2 * 60 * 1000); + + it('Shall change password', function(done) { + var username = process.env.USERNAME; + var oldPasswd = process.env.PASSWORD; + var newPasswd = 'oispnewpasswd2' + + helpers.users.changeUserPassword(userToken, username, oldPasswd, newPasswd, function(err, response) { + if (err) { + done(new Error("Cannot change password: " + err)); + } else { + assert.equal(response.password, 'oispnewpasswd2', 'new password error') + emailNum++; + done(); + } + }) + }) + + it('Shall delete draft rule', function(done){ + helpers.rules.deleteRule (userToken, accountId, null , function(err, response){ + if (err) { + done(new Error("cannot delete a draft rule " + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.equal(response.status, 'Done') + done(); + } + }) + }) + + it('Shall delete a rule', function(done){ + helpers.rules.deleteRule (userToken, accountId, rulelist[0].id, function(err, response){ + if (err) { + done(new Error("cannot delete a rule " + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.equal(response.status, 'Done') + done(); + } + }) + }) + + it('Shall delete a component', function(done){ + helpers.devices.deleteDeviceComponent (userToken, accountId, deviceId, componentId, function(err, response){ + if (err) { + done(new Error("cannot delete a component " + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.equal(response.status, 'Done') + done(); + } + }) + }) + + it('Shall 
delete a device', function(done){ + helpers.devices.deleteDevice (userToken, accountId, deviceId, function(err, response){ + if (err) { + done(new Error("cannot delete a device " + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.equal(response.status, 'Done') + done(); + } + }) + }) + + it('Shall delete an account', function(done){ + helpers.accounts.deleteAccount (userToken, accountId, function(err, response){ + if (err) { + done(new Error("cannot delete an account " + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.equal(response.status, 'Done') + done(); + } + }) + }) + + it('Shall delete a user', function(done){ + helpers.users.deleteUser (userToken, userId, function(err, response){ + if (err) { + done(new Error("cannot delete a user " + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.equal(response.status, 'Done') + done(); + } + }) + }) + +}) diff --git a/tests/package.json b/tests/package.json index d4090ea9..d2784fa8 100644 --- a/tests/package.json +++ b/tests/package.json @@ -16,8 +16,9 @@ "colors": "^1.2.1", "cfenv": "^1.0.0", "kafka-node": "^2.4.0", - "@open-iot-service-platform/oisp-sdk-js": "^1.0.0-beta1", "nodemailer": "^4.6.6", - "imap": "^0.8.19" + "imap": "^0.8.19", + "mailparser": "^2.2.0", + "@open-iot-service-platform/oisp-sdk-js":"https://github.com/Open-IoT-Service-Platform/oisp-sdk-js.git#fdd16923d2d244027638f0b42909d8c29c5444f6" } } From 02a5da0facf9ad41586afa499e8e1554fffcf004 Mon Sep 17 00:00:00 2001 From: Jamal El Youssefi Date: Thu, 5 Jul 2018 18:14:52 +0200 Subject: [PATCH 10/68] fixed the email e2e testing issue --- tests/lib/helpers/mail.js | 3 +++ tests/oisp-tests.js | 16 +++------------- 2 files changed, 6 insertions(+), 13 deletions(-) diff --git a/tests/lib/helpers/mail.js b/tests/lib/helpers/mail.js index 0ebf8940..1b332158 100644 --- a/tests/lib/helpers/mail.js +++ b/tests/lib/helpers/mail.js @@ -37,6 +37,9 @@ function getEmailMessage(user, password, host, port, num, cb) { imap.once('ready', function() { imap.openBox('INBOX', true, function(err, box) { if ( !err ) { + if ( num <= 0 ) { + num = box.messages.total; + } var f = imap.seq.fetch(num, { bodies: ['HEADER.FIELDS (TO)', '1'], struct: true diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index 52430692..09bdda1e 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -50,7 +50,6 @@ var imap_host = process.env.IMAP_HOST; var imap_port = process.env.IMAP_PORT; var recipientEmail = imap_username; -var emailNum = 1; var rules = []; @@ -749,7 +748,7 @@ describe("Sending observations and checking rules ...\n".bold, function() { if(paramValue == expectedActuationValue) { - helpers.mail.getEmailMessage(imap_username, imap_password, imap_host, imap_port, emailNum, function(err, message) { + helpers.mail.getEmailMessage(imap_username, imap_password, imap_host, imap_port, -1, function(err, message) { if (!err) { var lines = message.toString().split("\n"); var i; @@ -767,7 +766,6 @@ describe("Sending observations and checking rules ...\n".bold, function() { done(new Error("Wrong email " + message )) } else { - emailNum++; step(); } } @@ -1014,8 +1012,7 @@ describe("Adding user and posting email ...\n".bold, function() { }) it("Shall activate user with token", function(done) { - process.stdout.write(" receiving email for activation token..."); - helpers.mail.getEmailMessage(imap_username, imap_password, imap_host, imap_port, emailNum, function(err, message) { + 
helpers.mail.getEmailMessage(imap_username, imap_password, imap_host, imap_port, -1, function(err, message) { if (!err) { var regexp = /token=\w*?\r/ var activationline = regexp.exec(message.toString()); @@ -1028,7 +1025,6 @@ describe("Adding user and posting email ...\n".bold, function() { } else { assert.isString(activationToken,'activationToken is not string') - emailNum++; helpers.users.activateUser(activationToken, function(err, response) { if (err) { @@ -1079,7 +1075,6 @@ describe("Invite receiver ...\n".bold, function() { if (err) { done(new Error("Cannot create invitation: " + err)); } else { - emailNum++; assert.equal(response.email, imap_username, 'send invite to wrong name') done(); } @@ -1109,7 +1104,6 @@ describe("Invite receiver ...\n".bold, function() { if (err) { done(new Error("Cannot create invitation: " + err)); } else { - emailNum++; assert.equal(response.email, imap_username, 'send invite to wrong name') done(); } @@ -1119,7 +1113,6 @@ describe("Invite receiver ...\n".bold, function() { }) it('Shall get specific invitations', function(done){ - process.stdout.write(" receiving email for invitations..."); var getInvitation = function(cb){ helpers.auth.login(imap_username, imap_password, function(err, token) { if (err) { @@ -1220,8 +1213,7 @@ describe("change password and delete receiver ... \n".bold, function(){ }) it('Shall update receiver password', function(done) { - process.stdout.write(" receiving email for new password token..."); - helpers.mail.getEmailMessage(imap_username, imap_password, imap_host, imap_port, emailNum, function(err, message) { + helpers.mail.getEmailMessage(imap_username, imap_password, imap_host, imap_port, -1, function(err, message) { var regexp = /token=\w*?\r/ var activationline = regexp.exec(message.toString()); var rawactivation = activationline[0].split("token=")[1].toString(); @@ -1233,7 +1225,6 @@ describe("change password and delete receiver ... \n".bold, function(){ } else { assert.isString(activationToken,'activationToken is not string') - emailNum++; imap_password = 'Receiver12345' @@ -1259,7 +1250,6 @@ describe("change password and delete receiver ... \n".bold, function(){ done(new Error("Cannot change password: " + err)); } else { assert.equal(response.password, 'oispnewpasswd2', 'new password error') - emailNum++; done(); } }) From 3f489080a3a71e8d416aabfae5e52824039440ac Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Fri, 6 Jul 2018 10:44:57 +0200 Subject: [PATCH 11/68] Automatically remove default config after make update Also added some file to .gitignore. 
--- .gitignore | 6 +++++- Makefile | 5 ++++- config-backup/README.md | 3 +++ 3 files changed, 12 insertions(+), 2 deletions(-) create mode 100644 config-backup/README.md diff --git a/.gitignore b/.gitignore index 9d270851..d278eaa3 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,8 @@ setup-environment.sh .init .prepare -data \ No newline at end of file +data +config-backup/ +.firstRun +tests/.env +tests/package-lock.json \ No newline at end of file diff --git a/Makefile b/Makefile index 0aa6d389..118f36b4 100644 --- a/Makefile +++ b/Makefile @@ -100,7 +100,7 @@ start: build .prepare start-test: export TEST := "1" start-test: build .prepare @$(call msg,"Starting IoT connector (test mode) ..."); - @make -C tests email-account $(shell pwd)/tests/.env + @make -C tests email-account $(shell pwd)/tests/.env @source ./tests/.env && ./docker.sh up -d start-quick: build-quick .prepare @@ -119,6 +119,9 @@ stop: update: @$(call msg,"Git Update (dev only) ..."); @git pull + @if [ -f setup-environment.sh ]; then \ + mv setup-environment.sh config-backup/setup-environment-$$(date +%Y-%m-%d-%H%M%S).sh.bak; \ + fi; @git submodule init @git submodule update @git submodule foreach git fetch origin diff --git a/config-backup/README.md b/config-backup/README.md new file mode 100644 index 00000000..0ec4e42f --- /dev/null +++ b/config-backup/README.md @@ -0,0 +1,3 @@ +When `make update` is run, the configuration file will be moved to a backup by default. This folder contains the configuration backups. + +It is safe to delete those if you do not need them anymore. \ No newline at end of file From db455c7d4db9763d823aa4b01795e291ca0cc3b4 Mon Sep 17 00:00:00 2001 From: Jamal El Youssefi Date: Fri, 6 Jul 2018 15:48:54 +0200 Subject: [PATCH 12/68] Updated toplevel Makefile for e2e testing --- Makefile | 54 ++++++++++++++++++++++++++++++++++++++---------------- 1 file changed, 38 insertions(+), 16 deletions(-) diff --git a/Makefile b/Makefile index 118f36b4..2df33529 100644 --- a/Makefile +++ b/Makefile @@ -44,21 +44,39 @@ DOCKER_REGISTRY=localhost:5000/ esac \ fi; \ else \ - git submodule init; \ - git submodule update; \ + git submodule init; \ + git submodule update; \ fi; ifeq ($(wildcard ./setup-environment.sh ),) - @tput setaf 1 - @while true; do \ - read -r -p "Use the default config in setup-environment.example.sh file ? [y/N]: " response; \ - case $$response in \ - [Yy]* ) cp ./setup-environment.example.sh setup-environment.sh; break;; \ - [Nn]* ) break;; \ - * ) echo "Please answer yes or no.";; \ - esac \ - done ; - @tput sgr0 + @if [ "${TEST}" != "1" ]; then \ + tput setaf 1; \ + while true; do \ + read -r -p "Use the default config in setup-environment.example.sh file ? [y/N]: " response; \ + case $$response in \ + [Yy]* ) cp ./setup-environment.example.sh setup-environment.sh; break;; \ + [Nn]* ) break;; \ + * ) echo "Please answer yes or no.";; \ + esac \ + done ; \ + tput sgr0; \ + else \ + cp ./setup-environment.example.sh setup-environment.sh; \ + fi endif + + @if [ "${TEST}" == "1" ]; then \ + if [ -d ./data ] && [ ! 
-f ./data/.forTest ]; then \ + sudo rm -rf data-backup; \ + sudo mv data data-backup; \ + fi; \ + mkdir -p ./data && touch ./data/.forTest; \ + else \ + if [ -d ./data-backup ]; then \ + sudo rm -rf data; \ + sudo mv data-backup data; \ + fi; \ + fi + @if [ -f data/keys/private.pem ]; then echo "RSA keys existing already"; else \ mkdir -p data/keys; \ openssl genpkey -algorithm RSA -out data/keys/private.pem -pkeyopt rsa_keygen_bits:2048; \ @@ -76,8 +94,8 @@ build: .init @./docker.sh create .prepare: - source setup-environment.sh && docker run -i -v $(shell pwd)/oisp-frontend:/app "$${COMPOSE_PROJECT_NAME}_frontend" /bin/bash -c /app/public-interface/scripts/docker-prepare.sh - cp ./oisp-frontend/public-interface/deploy/postgres/base/*.sql ./oisp-frontend/public-interface/scripts/database + @source setup-environment.sh && docker run -i -v $(shell pwd)/oisp-frontend:/app "$${COMPOSE_PROJECT_NAME}_frontend" /bin/bash -c /app/public-interface/scripts/docker-prepare.sh + @cp ./oisp-frontend/public-interface/deploy/postgres/base/*.sql ./oisp-frontend/public-interface/scripts/database @touch $@ build-force: .init @@ -184,8 +202,12 @@ clean: @rm -f .init .prepare distclean: clean - @./docker.sh down - @rm -rf ./data + @if [ -f setup-environment.sh ]; then \ + ./docker.sh down; \ + fi + @if [ -f ./data/.forTest ]; then \ + sudo rm -rf ./data; \ + fi push-docker-images: source setup-environment.sh && \ From d9a646f518deed3ccd18593d8456ce3d5dd13d58 Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Tue, 10 Jul 2018 13:44:44 +0200 Subject: [PATCH 13/68] MAKEFILE: Force distclean before update --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 2df33529..00d0f984 100644 --- a/Makefile +++ b/Makefile @@ -76,7 +76,7 @@ endif sudo mv data-backup data; \ fi; \ fi - + @if [ -f data/keys/private.pem ]; then echo "RSA keys existing already"; else \ mkdir -p data/keys; \ openssl genpkey -algorithm RSA -out data/keys/private.pem -pkeyopt rsa_keygen_bits:2048; \ @@ -134,7 +134,7 @@ stop: @$(call msg,"Stopping IoT connector ..."); @./docker.sh stop $(CMD_ARGS) -update: +update: distclean @$(call msg,"Git Update (dev only) ..."); @git pull @if [ -f setup-environment.sh ]; then \ From d18e88b6142c7bb40dccdc3d84433c5f37088fe2 Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Wed, 1 Aug 2018 14:44:37 +0200 Subject: [PATCH 14/68] Changes to postgres container. 
--- Makefile | 1 - docker-compose-quick.yml | 5 ++--- docker-compose.yml | 5 +++-- docker-postgres/Dockerfile | 4 ++++ 4 files changed, 9 insertions(+), 6 deletions(-) create mode 100644 docker-postgres/Dockerfile diff --git a/Makefile b/Makefile index 00d0f984..0e628a3e 100644 --- a/Makefile +++ b/Makefile @@ -94,7 +94,6 @@ build: .init @./docker.sh create .prepare: - @source setup-environment.sh && docker run -i -v $(shell pwd)/oisp-frontend:/app "$${COMPOSE_PROJECT_NAME}_frontend" /bin/bash -c /app/public-interface/scripts/docker-prepare.sh @cp ./oisp-frontend/public-interface/deploy/postgres/base/*.sql ./oisp-frontend/public-interface/scripts/database @touch $@ diff --git a/docker-compose-quick.yml b/docker-compose-quick.yml index 5a860bd9..7f56c756 100644 --- a/docker-compose-quick.yml +++ b/docker-compose-quick.yml @@ -26,10 +26,9 @@ services: volumes: - ./data/hbase-logs:/opt/hbase/logs postgres: - image: "postgres:9.4.10" + image: "${DOCKER_REGISTRY}${COMPOSE_PROJECT_NAME}/postgres" volumes: - ./data/postgres:/var/lib/postgresql/data - - ./oisp-frontend/public-interface/scripts/database:/docker-entrypoint-initdb.d ports: - 5432:5432 environment: @@ -49,7 +48,7 @@ services: - ADVERTISED_PORT=9092 - AUTO_CREATE_TOPICS=true gearpump: - image: "${DOCKER_REGISTRY}{COMPOSE_PROJECT_NAME}/gearpump" + image: "${DOCKER_REGISTRY}${COMPOSE_PROJECT_NAME}/gearpump" depends_on: - nginx - hbase diff --git a/docker-compose.yml b/docker-compose.yml index f93cc48a..9af83388 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -27,10 +27,11 @@ services: volumes: - ./data/hbase-logs:/opt/hbase/logs postgres: - image: "postgres:9.4.10" + build: + context: . + dockerfile: docker-postgres/Dockerfile volumes: - ./data/postgres:/var/lib/postgresql/data - - ./oisp-frontend/public-interface/scripts/database:/docker-entrypoint-initdb.d ports: - 5432:5432 environment: diff --git a/docker-postgres/Dockerfile b/docker-postgres/Dockerfile new file mode 100644 index 00000000..fa40c49c --- /dev/null +++ b/docker-postgres/Dockerfile @@ -0,0 +1,4 @@ +FROM postgres:9.4-alpine + +COPY ./oisp-frontend/public-interface/deploy/postgres/base/*.sql /docker-entrypoint-initdb.d/ +COPY ./oisp-frontend/public-interface/scripts/database/create_test_database.sh /docker-entrypoint-initdb.d/ \ No newline at end of file From 4b232355254b6134d20a4872f4c3cde94d218f0b Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Thu, 2 Aug 2018 13:12:17 +0200 Subject: [PATCH 15/68] Remove .prepare from Makefile This is not required any more due to changes in postgres and frontend containers. 
---
 Makefile | 12 ++++--------
 1 file changed, 4 insertions(+), 8 deletions(-)

diff --git a/Makefile b/Makefile
index 0e628a3e..d04c5b6f 100644
--- a/Makefile
+++ b/Makefile
@@ -93,10 +93,6 @@ build: .init
 	@$(call msg,"Building IoT connector ...");
 	@./docker.sh create
 
-.prepare:
-	@cp ./oisp-frontend/public-interface/deploy/postgres/base/*.sql ./oisp-frontend/public-interface/scripts/database
-	@touch $@
-
 build-force: .init
 	@$(call msg,"Building IoT connector ...");
 	@./docker.sh create --force-recreate
@@ -110,17 +106,17 @@ ifeq (start,$(firstword $(MAKECMDGOALS)))
 	$(eval $(CMD_ARGS):;@:)
 endif
 
-start: build .prepare
+start: build
 	@$(call msg,"Starting IoT connector ...");
 	@./docker.sh up -d $(CMD_ARGS)
 
 start-test: export TEST := "1"
-start-test: build .prepare
+start-test: build
 	@$(call msg,"Starting IoT connector (test mode) ...");
 	@make -C tests email-account $(shell pwd)/tests/.env
 	@source ./tests/.env && ./docker.sh up -d
 
-start-quick: build-quick .prepare
+start-quick: build-quick
 	@$(call msg,"Starting IoT connector using pulled images...");
 	@./docker.sh -f docker-compose-quick.yml up -d $(CMD_ARGS)
 
@@ -198,7 +194,7 @@ logs:
 
 clean:
 	@$(call msg,"Cleaning ...");
-	@rm -f .init .prepare
+	@rm -f .init
 
 distclean: clean
 	@if [ -f setup-environment.sh ]; then \

From e50ccd4772cd15bf4ebe6ce1bc8ed0e93649c514 Mon Sep 17 00:00:00 2001
From: Ali Rasim Kocal
Date: Thu, 2 Aug 2018 12:36:21 +0000
Subject: [PATCH 16/68] Remove volume mount for frontend

This is not necessary anymore because the content is built into the
container. When the volume is mounted anyway, the app won't run, because the
node_modules directory, where the modules are installed in the previous build
step, will be overwritten.
---
 docker-compose.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index 9af83388..71304798 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -124,7 +124,6 @@ services:
       - kafka
     command: ./wait-for-it.sh postgres:5432 -t 300 -- ./wait-for-it.sh redis:6379 -t 300 -- ./wait-for-it.sh kafka:9092 -t 300 -- ./scripts/wait-for-heartbeat.sh backend websocket-server -- ./scripts/docker-start.sh
     volumes:
-      - ./oisp-frontend/public-interface:/app
       - ./data/keys:/app/keys
     environment:
       - VCAP_SERVICES=${VCAP_SERVICES}

From d25f63f931e1b17ea9f2a16f78ff091715e1b909 Mon Sep 17 00:00:00 2001
From: Ali Rasim Kocal
Date: Fri, 3 Aug 2018 10:55:59 +0200
Subject: [PATCH 17/68] Remove /app mount from nginx

---
 docker-compose.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index 71304798..2e175c5d 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -91,7 +91,6 @@ services:
       - frontend
     volumes:
       - ./data/keys/ssl:/etc/ssl
-      - ./oisp-frontend/public-interface:/app
   websocket-server:
     build:
       context: ./oisp-websocket-server

From 89ca1364f066ebcbd089d8f9105b2d9db20b3cfe Mon Sep 17 00:00:00 2001
From: Ali Rasim Kocal
Date: Thu, 2 Aug 2018 16:38:47 +0200
Subject: [PATCH 18/68] Remove volume mount from websocket-server

The volume is no longer required as the source is moved into the container.
---
 docker-compose.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index 2e175c5d..b3974cae 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -100,7 +100,6 @@ services:
       - postgres
       - kafka
     volumes:
-      - ./oisp-websocket-server:/app
       - ./data/keys:/app/keys
     working_dir: /app
     environment:

From 22a9958bb0e883368e744dbb22c2ee496a0f423f Mon Sep 17 00:00:00 2001
From: Marcel Wagner
Date: Tue, 14 Aug 2018 12:26:59 +0000
Subject: [PATCH 19/68] New testDB setup.

The test DB is now defined in setup-environment.sh. The test DB is created
when the postgres container is started for the first time. This avoids the
error when the frontend tries to recreate the DB. Postgres now gets a special
variable POSTGRES_DB_REGULAR, which is always the name of the production DB.
The test DB name is now always 'test'.
---
 Makefile                     | 1 -
 docker-compose.yml           | 2 +-
 setup-environment.example.sh | 7 ++++++-
 3 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/Makefile b/Makefile
index d04c5b6f..4ecb7654 100644
--- a/Makefile
+++ b/Makefile
@@ -157,7 +157,6 @@ test: export TEST := "1"
 test:
 	@for ((i=0; i < ${NB_TESTS}; i++)) do \
 		cd $(CURRENT_DIR) && \
-		sudo make distclean && \
 		make start-test && \
 		source ./tests/.env && cd tests && make test; \
 	done
diff --git a/docker-compose.yml b/docker-compose.yml
index b3974cae..62aed88c 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -35,7 +35,7 @@ services:
     ports:
       - 5432:5432
     environment:
-      - POSTGRES_DB=${POSTGRES_DB}
+      - POSTGRES_DB=${POSTGRES_DB_REGULAR}
       - POSTGRES_USER=${POSTGRES_USERNAME}
       - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
   kafka:
diff --git a/setup-environment.example.sh b/setup-environment.example.sh
index 55278fd4..ff74729a 100755
--- a/setup-environment.example.sh
+++ b/setup-environment.example.sh
@@ -19,7 +19,12 @@ export ZOOKEEPER_KAFKA_PORT='2181'
 export ZOOKEEPER_HBASE='hbase'
 export ZOOKEEPER_HBASE_PORT='2181'
 export POSTGRES='postgres'
-export POSTGRES_DB='iot'
+export POSTGRES_DB_REGULAR="iot"
+export POSTGRES_DB=${POSTGRES_DB_REGULAR} # postgres always needs the regular DB name for initialization. It automatically initializes the test DB as well.
+if [ "$TEST" = "1" ]; then
+  export POSTGRES_DB='test'
+  echo "Warning: Test database selected."
+fi
 export POSTGRES_PORT='5432'
 export POSTGRES_USERNAME='postgres'
 export POSTGRES_PASSWORD='intel123'

From cf692cd65b61d32dd5618dbd21d424072fe3cb24 Mon Sep 17 00:00:00 2001
From: Marcel Wagner
Date: Tue, 14 Aug 2018 12:36:28 +0000
Subject: [PATCH 20/68] E2E-Test fix: Too many failures due to email test timeouts, emails not retrieved in the right order, or not received at all.

Solved by adding a function to check whether new emails have arrived before
trying to retrieve them. Moved the whole mail.js to Promises. Added a function
to consume/delete emails before processing the next ones. Added a function to
download and delete all emails in the box. Changed the rule engine tests:
email arrival is not checked.
Added test for bulk checking whether all emails from rule-engine tests arrived --- tests/lib/helpers/mail.js | 236 +++++++++++++++++++----- tests/oisp-tests.js | 373 +++++++++++++++++++------------------- 2 files changed, 381 insertions(+), 228 deletions(-) diff --git a/tests/lib/helpers/mail.js b/tests/lib/helpers/mail.js index 1b332158..8655d1b0 100644 --- a/tests/lib/helpers/mail.js +++ b/tests/lib/helpers/mail.js @@ -21,64 +21,214 @@ var Imap = require('imap'); -function getEmailMessage(user, password, host, port, num, cb) { - if (!cb) { - throw "Callback required"; - } + +function removeRecentEmail(user, password, host, port){ + var imap = new Imap({ + user: user, + password: password, + host: host, + port: port, + connTimeout: 30000, + authTimeout: 20000, + tls: true, + tlsOptions: { rejectUnauthorized: false } + }); + + return new Promise(function(resolve,reject){ + imap.once('ready', function() { + imap.openBox('INBOX', false, function(err, box) { + if (!err){ + imap.seq.setFlags(1, '\\Deleted', function(err){ + if (err) reject(err); + imap.closeBox(true, function(err){ + if (err) reject(err); + resolve(); + }) + }) + } + else + reject(err); + }) + }); + imap.connect(); + }); +} + +function waitAndConsumeEmailMessage(user, password, host, port){ + return waitForNewEmail(user, password, host,port, 1) + .then(() => removeRecentEmail(user, password, host, port)); +} + +function waitForNewEmail(user, password, host, port, num){ + var newmail = 0; + var timeoutcount = 0; + const TIMEOUTS = 20; + var imap = new Imap({ user: user, password: password, host: host, port: port, + connTimeout: 30000, + authTimeout: 20000, tls: true, tlsOptions: { rejectUnauthorized: false } }); - imap.once('ready', function() { - imap.openBox('INBOX', true, function(err, box) { - if ( !err ) { - if ( num <= 0 ) { - num = box.messages.total; - } - var f = imap.seq.fetch(num, { - bodies: ['HEADER.FIELDS (TO)', '1'], - struct: true - }); - - f.on('message', function(msg, seqno) { - var buffer = ''; - msg.on('body', function(stream, info) { - - stream.on('data', function(chunk) { - buffer += chunk.toString('utf8'); - }); + return new Promise(function(resolve,reject){ + imap.once('ready', function(){ + check(); + function check(){ + imap.status('INBOX', function(err, box) { + if (box.messages.unseen >= num) { + resolve(box.messages.unseen); + } + else if (box.messages.unseen < num && timeoutcount < TIMEOUTS){ + setTimeout(check, 1000); + timeoutcount++; + } + else if (timeoutcount == TIMEOUTS) { + reject("Timeout") + } + }); + }; + }) + imap.connect(); + }) +} + +function getEmailMessage(user, password, host, port, num) { + + var imap = new Imap({ + user: user, + password: password, + host: host, + port: port, + connTimeout: 30000, + authTimeout: 20000, + tls: true, + tlsOptions: { rejectUnauthorized: false } + }); + + return new Promise(function(resolve, reject){ + imap.once('ready', function() { + imap.openBox('INBOX', false, function(err, box) { + if ( !err ) { + if ( num <= 0 ) { + num = box.messages.total; + } + var f = imap.seq.fetch(num, { + bodies: ['HEADER.FIELDS (TO)', '1'], + struct: true }); - msg.once('end', function() { - buffer = buffer.replace("<","<"); - buffer = buffer.replace(">",">"); - cb(null, buffer); - imap.closeBox(() => { - imap.destroy(); - imap.end(); - }); + f.on('message', function(msg, seqno) { + var buffer = ''; + msg.on('body', function(stream, info) { + + stream.on('data', function(chunk) { + buffer += chunk.toString('utf8'); + }); + }); + + msg.once('end', function() { + buffer = 
buffer.replace("<","<"); + buffer = buffer.replace(">",">"); + imap.seq.addFlags(num, '\\Deleted', () => + imap.closeBox(true, () => { + imap.destroy(); + imap.end(); + }) + ); + resolve(buffer); + }); }); - }); - } - else { - cb(err); - } - }); - }); + } + else { + reject(err); + } + }); + }); - imap.once('error', function(err) { - cb(err); + imap.once('error', function(err) { + reject(err); + }); + + imap.connect(); + }) +} + + +function getAllEmailMessages(user, password, host, port) { + + var imap = new Imap({ + user: user, + password: password, + host: host, + port: port, + connTimeout: 30000, + authTimeout: 20000, + tls: true, + tlsOptions: { rejectUnauthorized: false } }); - imap.connect(); + return new Promise(function(resolve, reject){ + imap.once('ready', function() { + imap.openBox('INBOX', false, function(err, box) { + if ( !err ) { + var num = Array.from(new Array(box.messages.total), (elem, index) => index + 1); + var f = imap.seq.fetch(num, { + bodies: ['HEADER.FIELDS (TO)', '1'], + struct: true + }); + var buffers = []; + f.on('message', function(msg, seqno) { + var buffer = ''; + msg.on('body', function(stream, info) { + stream.on('data', function(chunk) { + buffer += chunk.toString('utf8'); + }); + }); -} + msg.once('end', function() { + buffer = buffer.replace("<","<"); + buffer = buffer.replace(">",">"); + buffers.push(buffer); + imap.seq.addFlags(seqno, '\\Deleted', (err) => { + if (err){ + reject(err); + } + }); + //resolve(buffers); + }); + }); + f.once('error', (err) => { + reject(err); + }) + f.once('end', () => { + imap.closeBox(true, () => { + imap.destroy(); + imap.end(); + }) + resolve(buffers); + }) + } + else { + reject(err); + } + }); + }); + + imap.once('error', function(err) { + reject(err); + }); + imap.connect(); + }); +} module.exports ={ - getEmailMessage: getEmailMessage -} \ No newline at end of file + getEmailMessage: getEmailMessage, + getAllEmailMessages: getAllEmailMessages, + waitForNewEmail: waitForNewEmail, + waitAndConsumeEmailMessage: waitAndConsumeEmailMessage +} diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index 09bdda1e..71918dd1 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -702,89 +702,65 @@ describe("Creating rules ... 
\n".bold, function() { }).timeout(20000); }); -describe("Sending observations and checking rules ...\n".bold, function() { - - it('Shall send observation and check rules', function(done) { - assert.notEqual(componentId, null, "Invalid component id") - assert.notEqual(rules[switchOnCmdName].id, null, "Invalid switch-on rule id") - assert.notEqual(rules[switchOffCmdName].id, null, "Invalid switch-off rule id") - assert.notEqual(proxyConnector, null, "Invalid websocket proxy connector") - - var index = 0; - var nbActuations = 0; - - process.stdout.write(" "); - - for (var i = 0; i < temperatureValues.length; i++) { - temperatureValues[i].ts = null; - if (temperatureValues[i].expectedActuation != null) { - nbActuations++; - } - } - - var step = function(){ - index++; - - if (index == temperatureValues.length) { - process.stdout.write("\n"); - done(); - } else { - sendObservationAndCheckRules(index); - } - }; - - - helpers.connector.wsConnect(proxyConnector, deviceToken, deviceId, function(message) { - --nbActuations; - - var expectedActuationValue = temperatureValues[index].expectedActuation.toString(); - var componentParam = message.content.params.filter(function(param){ - return param.name == componentParamName; - }); - if(componentParam.length == 1) - { - var param = componentParam[0]; - var paramValue = param.value.toString(); - - if(paramValue == expectedActuationValue) - { - helpers.mail.getEmailMessage(imap_username, imap_password, imap_host, imap_port, -1, function(err, message) { - if (!err) { - var lines = message.toString().split("\n"); - var i; - for(i=0; i helpers.mail.getAllEmailMessages(imap_username, imap_password, imap_host, imap_port)) + .then( (messages) => { + var temperatureValuesCopy = temperatureValues.map( (elem) => elem); + messages.forEach( (message) => { + var lines = message.toString().split("\n"); + var i; + lines.forEach((line) => { + var reExecReason = /^- Reason:.*/; + if ( reExecReason.test(line) ) { + var reason = line.split(":")[1].trim(); + var index = temperatureValuesCopy.findIndex( (element) => { + return (reason == element.expectedEmailReason); + }) + temperatureValuesCopy.splice(index, 1); + } + }) + }) + assert.equal(temperatureValuesCopy.length, 3, "Received emails do not match expected emails sent from rule-engine"); + done(); + }).catch( (err) => {done(new Error("Error in Rule Engine Emails: ", err))}); + }).timeout(30 * 1000); + it('Shall check observation', function(done) { helpers.data.searchData(firstObservationTime, userToken, accountId, deviceId, componentId, function(err, data) { if (err) { @@ -995,11 +995,10 @@ describe("update rules and create draft rules ... 
\n".bold, function(){ }) }) -describe("Adding user and posting email ...\n".bold, function() { - - it("Shall add a new user and post email", function(done) { - assert.isNotEmpty(imap_username, "no email provided"); - assert.isNotEmpty(imap_password, "no password provided"); +describe("Adding user and posting email ...\n".bold, function() { + it("Shall add a new user and post email", function(done) { + assert.isNotEmpty(imap_username, "no email provided"); + assert.isNotEmpty(imap_password, "no password provided"); helpers.users.addUser(userToken, imap_username, imap_password ,function(err, response) { if (err) { done(new Error("Cannot create new user: " + err)); @@ -1010,46 +1009,43 @@ describe("Adding user and posting email ...\n".bold, function() { } }) }) - - it("Shall activate user with token", function(done) { - helpers.mail.getEmailMessage(imap_username, imap_password, imap_host, imap_port, -1, function(err, message) { - if (!err) { - var regexp = /token=\w*?\r/ - var activationline = regexp.exec(message.toString()); - var rawactivation = activationline[0].split("token=")[1].toString(); - var activationToken = rawactivation.replace(/\r/, '') - activationToken = activationToken.substring(2) - - if ( activationToken === null) { - done(new Error("Wrong email " + message )) - } - else { - assert.isString(activationToken,'activationToken is not string') - - helpers.users.activateUser(activationToken, function(err, response) { - if (err) { - done(new Error("Cannot activate user: " + err)); - } else { - assert.equal(response.status, 'OK', 'cannot activate user') - - helpers.auth.login(imap_username, imap_password, function(err, token) { - if (err) { - done(new Error("Cannot authenticate receiver: " + err)); - } else { - receiverToken = token; - done(); - } - }) - } - }); - } - } - else { - done(new Error("Wrong email " + err )) - } - }) - }).timeout( 60 * 1000); - + + it("Shall activate user with token", function(done) { + helpers.mail.waitForNewEmail(imap_username, imap_password, imap_host, imap_port, 1) + .then(() => helpers.mail.getEmailMessage(imap_username, imap_password, imap_host, imap_port, -1)) + .then(function(message) { + var regexp = /token=\w*?\r/; + var activationline = regexp.exec(message.toString()); + var rawactivation = activationline[0].split("token=")[1].toString(); + var activationToken = rawactivation.replace(/\r/, '') + activationToken = activationToken.substring(2) + + if ( activationToken === null) { + done(new Error("Wrong email " + message )) + } + else { + assert.isString(activationToken,'activationToken is not string') + + helpers.users.activateUser(activationToken, function(err, response) { + if (err) { + done(new Error("Cannot activate user: " + err)); + } else { + assert.equal(response.status, 'OK', 'cannot activate user') + + helpers.auth.login(imap_username, imap_password, function(err, token) { + if (err) { + done(new Error("Cannot authenticate receiver: " + err)); + } else { + receiverToken = token; + done(); + } + }) + } + }); + } + }).catch(function(err){done(new Error("No mail received: " + err))}); + }).timeout( 60 * 1000); + it('Shall create receiver account', function(done) { assert.notEqual(receiverToken, null, "Invalid user token") helpers.accounts.createAccount('receiver', receiverToken, function(err, response) { @@ -1068,18 +1064,20 @@ describe("Invite receiver ...\n".bold, function() { var inviteId = null; - it('Shall create invitation', function(done){ + it('Shall create invitation', function(done){ // a mail will be sent to receiver 
helpers.invitation.createInvitation(userToken, accountId, imap_username, function(err, response) { if (err) { done(new Error("Cannot create invitation: " + err)); - } else { - assert.equal(response.email, imap_username, 'send invite to wrong name') - done(); - } - }) - }) + } else { + assert.equal(response.email, imap_username, 'send invite to wrong name'); + helpers.mail.waitAndConsumeEmailMessage(imap_username, imap_password, imap_host, imap_port).then(function(message){ + done(); + }). catch(function(err){done(new Error("Mail not received: " + err))}); + } + }) + }).timeout( 30 * 1000); it('Shall get all invitations', function(done){ @@ -1094,7 +1092,7 @@ describe("Invite receiver ...\n".bold, function() { }) }) - it('Shall delete invitation and send again', function(done){ + it('Shall delete invitation and send again', function(done){ helpers.invitation.deleteInvitations(userToken, accountId, imap_username, function(err, response) { if (err) { done(new Error("Cannot delete invitation: " + err)); @@ -1103,15 +1101,17 @@ describe("Invite receiver ...\n".bold, function() { // when send invitation, the receiver will receive an email said he should login to accept the invitation if (err) { done(new Error("Cannot create invitation: " + err)); - } else { - assert.equal(response.email, imap_username, 'send invite to wrong name') - done(); - } - }) - } - }) - }) - + } else { + assert.equal(response.email, imap_username, 'send invite to wrong name'); + helpers.mail.waitAndConsumeEmailMessage(imap_username, imap_password, imap_host, imap_port).then(function(message){ + done(); + }) + } + }) + } + }) + }).timeout(30 * 1000); + it('Shall get specific invitations', function(done){ var getInvitation = function(cb){ helpers.auth.login(imap_username, imap_password, function(err, token) { @@ -1154,19 +1154,21 @@ describe("Invite receiver ...\n".bold, function() { }) }) - it('Shall request activation', function(done) { + it('Shall request activation', function(done) { var username = process.env.USERNAME; helpers.users.requestUserActivation(username, function(err, response) { if (err) { done(new Error('cannot request activation:' + err)); } else { - assert.equal(response.status, 'OK') - done(); + assert.equal(response.status, 'OK') + helpers.mail.waitAndConsumeEmailMessage(imap_username, imap_password, imap_host, imap_port).then(function(message){ + done(); + }) } }) - }) - + }).timeout( 30 * 1000); + it('Shall get id of receiver and change privilege', function (done) { helpers.auth.tokenInfo(receiverToken, function (err, response) { if (err) { @@ -1202,7 +1204,8 @@ describe("Invite receiver ...\n".bold, function() { describe("change password and delete receiver ... \n".bold, function(){ it('Shall request change receiver password', function(done) { - helpers.users.requestUserPasswordChange(imap_username, function(err, response) { + var username = process.env.USERNAME; + helpers.users.requestUserPasswordChange(username, function(err, response) { if (err) { done(new Error("Cannot request change password : " + err)); } else { @@ -1211,38 +1214,38 @@ describe("change password and delete receiver ... 
\n".bold, function(){ } }) }) - - it('Shall update receiver password', function(done) { - helpers.mail.getEmailMessage(imap_username, imap_password, imap_host, imap_port, -1, function(err, message) { - var regexp = /token=\w*?\r/ - var activationline = regexp.exec(message.toString()); - var rawactivation = activationline[0].split("token=")[1].toString(); - var activationToken = rawactivation.replace(/\r/, '') - activationToken = activationToken.substring(2) - - if ( activationToken === null) { - done(new Error("Wrong email " + message )) - } - else { - assert.isString(activationToken,'activationToken is not string') - - imap_password = 'Receiver12345' - - helpers.users.updateUserPassword(activationToken, imap_password, function(err, response) { - if (err) { - done(new Error("Cannot update receiver password : " + err)); - } else { - assert.equal(response.status, 'OK', 'status error') - done(); - } - }) - } - }); - }).timeout(2 * 60 * 1000); - + + it('Shall update receiver password', function(done) { + helpers.mail.waitForNewEmail(imap_username, imap_password, imap_host, imap_port, 1) + .then(() => helpers.mail.getEmailMessage(imap_username, imap_password, imap_host, imap_port, -1)) + .then(function(message) { + var regexp = /token=\w*?\r/; + var activationline = regexp.exec(message.toString()); + var rawactivation = activationline[0].split("token=")[1].toString(); + var activationToken = rawactivation.replace(/\r/, '') + activationToken = activationToken.substring(2) + + if ( activationToken === null) { + done(new Error("Wrong email " + message )) + } + else { + assert.isString(activationToken,'activationToken is not string') + var password = 'Receiver12345' + helpers.users.updateUserPassword(activationToken, password, function(err, response) { + if (err) { + done(new Error("Cannot update receiver password : " + err)); + } else { + assert.equal(response.status, 'OK', 'status error') + done(); + } + }) + } + }).catch(function(err){done(new Error("No mail received: " + err))}); + }).timeout(2 * 60 * 1000); + it('Shall change password', function(done) { var username = process.env.USERNAME; - var oldPasswd = process.env.PASSWORD; + var oldPasswd = "Receiver12345"; var newPasswd = 'oispnewpasswd2' helpers.users.changeUserPassword(userToken, username, oldPasswd, newPasswd, function(err, response) { From 151b04abb0b0011460daf86a5bebdbdad1f8e9dd Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Sun, 2 Sep 2018 16:23:28 +0000 Subject: [PATCH 21/68] E2E-tests: Added statistics rule tests --- tests/Makefile | 5 +- tests/lib/helpers/alerts.js | 23 ++- tests/lib/helpers/control.js | 25 +++- tests/lib/helpers/mail.js | 2 +- tests/lib/helpers/rules.js | 70 +++++++++ tests/oisp-tests.js | 57 ++++++-- tests/subtests/promise-wrap.js | 182 +++++++++++++++++++++++ tests/subtests/statistic-rule-tests.js | 190 +++++++++++++++++++++++++ 8 files changed, 535 insertions(+), 19 deletions(-) create mode 100644 tests/subtests/promise-wrap.js create mode 100644 tests/subtests/statistic-rule-tests.js diff --git a/tests/Makefile b/tests/Makefile index 9bc54fe2..b6f8ef83 100644 --- a/tests/Makefile +++ b/tests/Makefile @@ -36,7 +36,10 @@ npm-install: test: npm-install @$(call msg,"Waiting for frontend"); - @. ../setup-environment.sh && until kafkacat -b localhost:9092 -t heartbeat -e 2> /dev/null | grep -m 1 "dashboard"; do sleep 1 ; done +# fill in separator to avoid accidentially dashboard being most recent heartbeat. 
+# Then look always for latest heartbeat + @ (for i in {1..4}; do echo new-test ; done) | kafkacat -P -b localhost:9092 -t heartbeat; + @. ../setup-environment.sh && until kafkacat -b localhost:9092 -t heartbeat -e -o -4 2> /dev/null | grep -m 1 "dashboard"; do sleep 1 ; done @$(call msg,"Resetting database ..."); @. ../setup-environment.sh && docker exec -i "$${COMPOSE_PROJECT_NAME}_frontend_1" node /app/admin resetDB diff --git a/tests/lib/helpers/alerts.js b/tests/lib/helpers/alerts.js index 0bc1a3c4..7d11b09d 100644 --- a/tests/lib/helpers/alerts.js +++ b/tests/lib/helpers/alerts.js @@ -149,10 +149,31 @@ function addCommentsToAlert(userToken, accountId, alertId, comments, cb) { }); } + +function deleteAlert(userToken, accountId, alertId){ + var data = { + userToken: userToken, + accountId: accountId, + alertId: alertId + } + + return new Promise((resolve, reject) => { + api.alerts.deleteAlert(data, function(err, response){ + if (err){ + reject(err); + } + else { + resolve(response); + } + }); + }); +} + module.exports = { getListOfAlerts: getListOfAlerts, getAlertDetails: getAlertDetails, closeAlert: closeAlert, updateAlertStatus: updateAlertStatus, - addCommentsToAlert: addCommentsToAlert + addCommentsToAlert: addCommentsToAlert, + deleteAlert: deleteAlert }; diff --git a/tests/lib/helpers/control.js b/tests/lib/helpers/control.js index 2909b75a..f839edc7 100644 --- a/tests/lib/helpers/control.js +++ b/tests/lib/helpers/control.js @@ -113,8 +113,29 @@ function saveComplexCommand(name, paramName, value, userToken, accountId, devic }); } + +function deleteComplexCommand(name, userToken, accountId) { + + var data = { + userToken: userToken, + accountId: accountId, + commandName: name, + }; + + return new Promise((resolve, reject) => { + api.control.deleteComplexCommand(data, function(err, response) { + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }) +} + module.exports={ saveComplexCommand: saveComplexCommand, pullActuations: pullActuations, - sendActuationCommand: sendActuationCommand -} \ No newline at end of file + sendActuationCommand: sendActuationCommand, + deleteComplexCommand: deleteComplexCommand +} diff --git a/tests/lib/helpers/mail.js b/tests/lib/helpers/mail.js index 8655d1b0..070cc9ff 100644 --- a/tests/lib/helpers/mail.js +++ b/tests/lib/helpers/mail.js @@ -88,7 +88,7 @@ function waitForNewEmail(user, password, host, port, num){ timeoutcount++; } else if (timeoutcount == TIMEOUTS) { - reject("Timeout") + reject(new Error("Timeout")) } }); }; diff --git a/tests/lib/helpers/rules.js b/tests/lib/helpers/rules.js index 79445a76..da9ec315 100644 --- a/tests/lib/helpers/rules.js +++ b/tests/lib/helpers/rules.js @@ -117,6 +117,75 @@ function createRule(ruleConfig, userToken, accountId, deviceId, cb) { }; + api.rules.createRule(data, function(err, response) { + if (err) { + cb(err) + } else { + var ruleId = response.id; + var syncInterval = setInterval( function(id) { + getRuleDetails(userToken, accountId, ruleId, function(err, status) { + if (err) { + clearInterval(syncInterval); + cb(err) + } + else { + if ( status == true ) { + clearInterval(syncInterval); + cb(null, ruleId) + } + } + }) + }, 500) + } + }); + +} + +function createStatisticRule(ruleConfig, userToken, accountId, deviceId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + + body: { + name: ruleConfig.name, + description: "OISP testing rule", + priority: "Medium", + type: "Regular", + status: "Active", + resetType: 
"Automatic", + synchronizationStatus: "NotSync", + + actions: ruleConfig.actions, + + population: { + ids: [deviceId], + attributes: null, + tags: null + }, + + conditions: { + operator: "OR", + values: [{ + component: { + dataType: "Number", + name: ruleConfig.conditionComponent, + cid: ruleConfig.cid + }, + type: "statistics", + values: [ruleConfig.statisticConditionValue.toString()], + operator: ruleConfig.statisticConditionOperator, + baselineCalculationLevel: "Device level", + baselineMinimalInstances: ruleConfig.statisticMinimalInstances, + baselineSecondsBack: ruleConfig.statisticSecondsBack + }] + } + } + }; + api.rules.createRule(data, function(err, response) { if (err) { cb(err) @@ -335,6 +404,7 @@ function deleteRule (userToken, accountId,ruleId , cb) { module.exports={ createRule:createRule, + createStatisticRule:createStatisticRule, getRules: getRules, getRuleDetails: getRuleDetails, updateRule: updateRule, diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index 71918dd1..362edb50 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -65,7 +65,7 @@ rules[switchOnCmdName] = { }, { type: "mail", - target: [ recipientEmail ] + target: [ emailRecipient ] } ], }; @@ -82,7 +82,7 @@ rules[switchOffCmdName] = { }, { type: "mail", - target: [ recipientEmail ] + target: [ emailRecipient ] } ], }; @@ -161,7 +161,18 @@ process.stdout.write("__________________________________________________________ process.stdout.write(" \n"); process.stdout.write(" OISP E2E TESTING \n".green.bold); process.stdout.write("_____________________________________________________________________\n".bold); - +//Callback for WSS +var cbManager = function(){ + + var wssCB = null; + return { + "cb": function(message){ + wssCB(message) + }, + "set": function(newCB){wssCB = newCB} + } +}(); + describe("Waiting for OISP services to be ready ...\n".bold, function() { @@ -208,7 +219,7 @@ describe("Waiting for OISP services to be ready ...\n".bold, function() { kafkaConsumer = new kafka.Consumer(kafkaClient, topics, options) - var oispServicesToMonitor = ['rules-engine']; + var oispServicesToMonitor = ['rules-engine']; process.stdout.write(" "); kafkaConsumer.on('message', function (message) { process.stdout.write(".".green); @@ -235,7 +246,7 @@ describe("Waiting for OISP services to be ready ...\n".bold, function() { }); } else { - done("Cannot get Kafka offset ") + done(new Error("Cannot get Kafka offset ")) } }); @@ -699,8 +710,8 @@ describe("Creating rules ... 
\n".bold, function() { } }) - }).timeout(20000); -}); + }).timeout(20000); +}); describe("Sending observations and checking rules ...\n".bold, function() { @@ -733,7 +744,7 @@ describe("Sending observations and checking rules ...\n".bold, function() { } }; - helpers.connector.wsConnect(proxyConnector, deviceToken, deviceId, function(message) { + cbManager.set(function(message) { --nbActuations; var expectedActuationValue = temperatureValues[index].expectedActuation.toString(); @@ -760,9 +771,10 @@ describe("Sending observations and checking rules ...\n".bold, function() { done(new Error("Did not find component param: " + componentParamName)) } }); + helpers.connector.wsConnect(proxyConnector, deviceToken, deviceId, cbManager.cb); var sendObservationAndCheckRules = function(index) { - + process.stdout.write(".".green); helpers.devices.submitData(temperatureValues[index].value, deviceToken, accountId, deviceId, componentId, function(err, ts) { @@ -809,7 +821,7 @@ describe("Sending observations and checking rules ...\n".bold, function() { }) assert.equal(temperatureValuesCopy.length, 3, "Received emails do not match expected emails sent from rule-engine"); done(); - }).catch( (err) => {done(new Error("Error in Rule Engine Emails: ", err))}); + }).catch( (err) => {done(err)}); }).timeout(30 * 1000); it('Shall check observation', function(done) { @@ -858,7 +870,24 @@ describe("Sending observations and checking rules ...\n".bold, function() { }).timeout(10000); }); - + +describe("Do statistics rule subtests ...".bold, + function() { + var test; + var descriptions = require("./subtests/statistic-rule-tests").descriptions; + it(descriptions.createStatisticsRules,function(done) { + test = require("./subtests/statistic-rule-tests").test(userToken, accountId, deviceId, deviceToken, cbManager); + test.createStatisticsRules(done); + }).timeout(10000) + it(descriptions.sendObservations,function(done) { + test.sendObservations(done); + }).timeout(50000); + it(descriptions.cleanup,function(done) { + test.cleanup(done); + }).timeout(10000); + }); + + describe("Geting and manage alerts ... \n".bold, function(){ it('Shall get list of alerts', function(done){ @@ -1043,7 +1072,7 @@ describe("Adding user and posting email ...\n".bold, function() { } }); } - }).catch(function(err){done(new Error("No mail received: " + err))}); + }).catch(function(err){done(err)}); }).timeout( 60 * 1000); it('Shall create receiver account', function(done) { @@ -1074,7 +1103,7 @@ describe("Invite receiver ...\n".bold, function() { assert.equal(response.email, imap_username, 'send invite to wrong name'); helpers.mail.waitAndConsumeEmailMessage(imap_username, imap_password, imap_host, imap_port).then(function(message){ done(); - }). catch(function(err){done(new Error("Mail not received: " + err))}); + }). catch(function(err){done(err)}); } }) }).timeout( 30 * 1000); @@ -1240,7 +1269,7 @@ describe("change password and delete receiver ... \n".bold, function(){ } }) } - }).catch(function(err){done(new Error("No mail received: " + err))}); + }).catch(function(err){done(err)}); }).timeout(2 * 60 * 1000); it('Shall change password', function(done) { diff --git a/tests/subtests/promise-wrap.js b/tests/subtests/promise-wrap.js new file mode 100644 index 00000000..60c5d8da --- /dev/null +++ b/tests/subtests/promise-wrap.js @@ -0,0 +1,182 @@ +/** + * Copyright (c) 2017 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/*jshint esversion: 6 */ +/*jshint undef: true, unused: true */ +/* jshint node: true */ + +"use strict"; + + +var helpers = require("../lib/helpers"); + + +var checkObservations = function(tempValues, cid, cbManager, deviceToken, accountId, deviceId, componentParamName){ + var firstObservationTime; + return new Promise((resolve, reject) => { + var index = 0; + var nbActuations = 0; + process.stdout.write(" "); + + tempValues + .forEach( (value) => { + value.ts = null; + if (value.expectedActuation !== null) { + nbActuations++; + } + }); + var step; + var sendObservationAndCheckRules = function(index) { + process.stdout.write(".".green); + helpers.devices.submitData(tempValues[index].value, deviceToken, accountId, deviceId, cid, function(err, ts) { + tempValues[index].ts = ts; + + if (index === 0) { + firstObservationTime = tempValues[index].ts; + } + if (err) { + reject(err); + } + + if (tempValues[index].expectedActuation === null) { + step(); + } + }); + }; + step = function(){ + index++; + if (index === tempValues.length) { + process.stdout.write("\n"); + resolve(); + } else { + sendObservationAndCheckRules(index); + } + }; + + var actuationCallback = function(message) { + --nbActuations; + var expectedActuationValue = tempValues[index].expectedActuation.toString(); + var componentParam = message.content.params.filter(function(param){ + return param.name === componentParamName; + }); + if(componentParam.length === 1) + { + var param = componentParam[0]; + var paramValue = param.value.toString(); + + if(paramValue === expectedActuationValue) + { + step(); + } + else + { + reject(new Error("Param value wrong. 
Expected: " + expectedActuationValue + " Received: " + paramValue)); + } + } + else + { + reject(new Error("Did not find component param: " + componentParamName)); + } + }; + cbManager.set(actuationCallback); + + + sendObservationAndCheckRules(index); + }); +}; + +var addComponent = (componentName, componentType, deviceToken, accountId, deviceId) => { + return new Promise(function(resolve, reject){ + helpers.devices.addDeviceComponent(componentName, componentType, deviceToken, accountId, deviceId, function(err, id) { + if (err) { + reject(err); + } else { + resolve(id); + } + }); + }); +}; +var addActuator = (actuatorName, actuatorType, deviceToken, accountId, deviceId) => { + return new Promise(function(resolve, reject){ + helpers.devices.addDeviceComponent(actuatorName, actuatorType, deviceToken, accountId, deviceId, function(err, id) { + if (err) { + reject(err); + } else { + resolve(id); + } + }); + }); +}; +var createCommand = (cmdName, componentParamName, onOff, userToken, accountId, deviceId, actuatorId) => { + return new Promise(function(resolve, reject){ + helpers.control.saveComplexCommand(cmdName, componentParamName, onOff, userToken, accountId, deviceId, actuatorId, function(err,response) { + if (err) { + reject(err); + } else { + if (response.status !== 'OK') { + reject(new Error("Wrong status: " + response.status)); + } + else { + resolve(); + } + } + }); + }); +}; +var createStatisticRule = (rule, userToken, accountId, deviceId) => { + return new Promise(function(resolve, reject){ + helpers.rules.createStatisticRule(rule, userToken, accountId, deviceId, function(err, id) { + if (err) { + reject(err); + } else { + rule.id = id; + resolve(); + } + }); + }); +}; + +var deleteComponent = function(userToken, accountId, deviceId, componentId){ + return new Promise((resolve, reject) => { + helpers.devices.deleteDeviceComponent (userToken, accountId, deviceId, componentId, function(err, response){ + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }); +}; +var deleteRule = function(userToken, accountId, ruleId){ + return new Promise((resolve, reject) => { + helpers.rules.deleteRule (userToken, accountId, ruleId, function(err, response){ + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }); +}; + +module.exports = { + checkObservations: checkObservations, + addComponent: addComponent, + addActuator: addActuator, + createCommand: createCommand, + createStatisticRule: createStatisticRule, + deleteComponent: deleteComponent, + deleteRule: deleteRule +}; diff --git a/tests/subtests/statistic-rule-tests.js b/tests/subtests/statistic-rule-tests.js new file mode 100644 index 00000000..57d4e986 --- /dev/null +++ b/tests/subtests/statistic-rule-tests.js @@ -0,0 +1,190 @@ +/** + * Copyright (c) 2018 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +/*jshint esversion: 6 */ +/*jshint undef: true, unused: true */ +/* jshint node: true */ + +"use strict"; + +var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { + var chai = require('chai'); + var assert = chai.assert; + var helpers = require("../lib/helpers"); + var componentName = "temperature-sensor-srt"; + var componentType = "temperature.v1.0"; + var actuatorName = "powerswitch-actuator-srt"; + var actuatorType = "powerswitch.v1.0"; + var switchOnCmdName = "switch-on-srt"; + var switchOffCmdName = "switch-off-srt"; + var promtests = require('./promise-wrap'); + var rules = []; + var componentId; + var actuatorId; + var componentParamName = "LED"; + + rules[switchOnCmdName] = { + name: "oisp-tests-rule-statistic-2stddef", + conditionComponent: componentName, + statisticConditionOperator: ">=", + statisticConditionValue: "2", //2*stddev + statisticMinimalInstances: 9, + statisticSecondsBack: 60, //number of seconds to look back when collecting the data + actions: [ + { + type: "actuation", + target: [ switchOnCmdName ] + } + ], + }; + + rules[switchOffCmdName] = { + name: "oisp-tests-rule-statistic-3stddef", + conditionComponent: componentName, + statisticConditionOperator: "<", + statisticConditionValue: "-3", //-3*stddev + statisticMinimalInstances: 9, + statisticSecondsBack: 60, //number of seconds to look back when collecting the relevant data + actions: [ + { + type: "actuation", + target: [ switchOffCmdName ] + } + ], + }; + var temperatureValues = [ + { + value: 17.1, + expectedActuation: null + }, + { + value: 17.0, + expectedActuation: null, + }, + { + value: 17.9, + expectedActuation: null, + }, + { + value: 17.5, + expectedActuation: null, + }, + { + value: 18.1, + expectedActuation: null, + }, + { + value: 16.8, + expectedActuation: null, + }, + { + value: 17.3, + expectedActuation: null, + }, + { + value: 16.9, + expectedActuation: null + }, + { + value: 17, + expectedActuation: null, + }, + { + value: 18.6, + expectedActuation: 1 // switch on + }, + { + value: 15.0, + expectedActuation: null + }, + { + value: 10.0, + expectedActuation: 0 //switch off + } + ]; + + + //********************* Main Object *****************// + //---------------------------------------------------// + return { + "createStatisticsRules": function(done) { + //To be independent of main tests, own sensors, actuators, and commands have to be created + promtests.addComponent(componentName, componentType, deviceToken, accountId, deviceId) + .then((id) => {componentId = id; rules[switchOffCmdName].cid = componentId; rules[switchOnCmdName].cid = componentId;}) + .then(() => promtests.addActuator(actuatorName, actuatorType, deviceToken, accountId, deviceId)) + .then((id) => {actuatorId = id;}) + .then(() => promtests.createCommand(switchOffCmdName, componentParamName, 0, userToken, accountId, deviceId, actuatorId)) + .then(() => promtests.createCommand(switchOnCmdName, componentParamName, 1, userToken, accountId, deviceId, actuatorId)) + .then(() => promtests.createStatisticRule(rules[switchOffCmdName], userToken, accountId, deviceId)) + .then(() => promtests.createStatisticRule(rules[switchOnCmdName], userToken, accountId, deviceId)) + .then(() => {done();}) + .catch((err) => {done(err);}); + }, + "sendObservations": function(done){ + assert.notEqual(componentId, null, "CommponentId not defined"); + assert.notEqual(rules[switchOnCmdName].id, null, "Rule not defined"); + assert.notEqual(rules[switchOffCmdName].id, null, "Rule not defined"); + assert.notEqual(cbManager, 
null, "cbManager proxy not defined"); + assert.notEqual(deviceToken, null, "Device Token not defined"); + assert.notEqual(deviceId, null, "DeviceId not defined"); + + promtests.checkObservations(temperatureValues, rules[switchOnCmdName].cid, cbManager, deviceToken, accountId, deviceId, componentParamName) + .then(() => {done();}) + .catch((err) => { done(err);}); + }, + "cleanup": function(done){ + var getListOfAlerts = function(userToken, accountId){ + return new Promise((resolve, reject) => { + helpers.alerts.getListOfAlerts(userToken, accountId, function(err, response) { + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }); + }; + //delete 2 most recent alerts + //delete new components + promtests.deleteComponent(userToken, accountId, deviceId, componentId) + .then(() => promtests.deleteComponent(userToken, accountId, deviceId, actuatorId)) + //delete new commands + .then(() => helpers.control.deleteComplexCommand(switchOnCmdName, userToken, accountId)) + .then(() => helpers.control.deleteComplexCommand(switchOffCmdName, userToken, accountId)) + //delete Statistic Rules + .then(() => promtests.deleteRule(userToken, accountId, rules[switchOnCmdName].id)) + .then(() => promtests.deleteRule(userToken, accountId, rules[switchOffCmdName].id)) + .then(() => getListOfAlerts(userToken, accountId)) + .then((alerts) => { + var prm1 = helpers.alerts.deleteAlert(userToken, accountId, alerts[0].alertId); + var prm2 = helpers.alerts.deleteAlert(userToken, accountId, alerts[1].alertId); + return Promise.all([prm1, prm2]); + }) + .then(() => {done();}) + .catch((err) => {done(err);}); + } + }; +}; + +var descriptions = { + "createStatisticsRules": "Shall create statisics rules and wait for synchronization with RE", + "sendObservations": "Shall send observations and trigger event for statistics rules", + "cleanup": "Cleanup components, commands, rules created for subtest" +}; + +module.exports = { + test: test, + descriptions: descriptions +}; From e9464eab77540affa2f1a6e6ba95e033828568e0 Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Fri, 7 Sep 2018 06:57:55 +0200 Subject: [PATCH 22/68] Major changes to docker-compose and makefile * Removed docker-compose-quick, a single file is used for pulling and manually building. 
* Refactored Makefile heavily, and added documentation
* Moved to a new version of docker-compose in platform setup
* Made the matching changes to setup-environment.example.sh
---
 .gitignore                          |   3 +-
 Makefile                            | 170 ++++++++++++++++------------
 docker-compose-quick.yml            | 132 ---------------------
 docker-compose.yml                  |  35 +++++-
 docker-postgres/Dockerfile          |   3 +-
 docker.sh                           |   6 +
 platform-setup/setup-ubuntu16.04.sh |   2 +-
 setup-environment.example.sh        |   2 +
 8 files changed, 142 insertions(+), 211 deletions(-)
 delete mode 100644 docker-compose-quick.yml

diff --git a/.gitignore b/.gitignore
index d278eaa3..787397fe 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,4 +5,5 @@ data
 config-backup/
 .firstRun
 tests/.env
-tests/package-lock.json
\ No newline at end of file
+tests/package-lock.json
+data-backup/
\ No newline at end of file
diff --git a/Makefile b/Makefile
index 4ecb7654..ef66c439 100644
--- a/Makefile
+++ b/Makefile
@@ -25,8 +25,12 @@ BRANCH:=$(shell git branch| grep \*| cut -d ' ' -f2)
 export TEST = 0
 export HOST_IP_ADDRESS=$(shell ifconfig docker0 | sed -En 's/.*inet (addr:)?(([0-9]*\.){3}[0-9]*).*/\2/p')
 SSL_CERT_PATH:=data/keys/ssl
+
 COMPOSE_PROJECT_NAME?="oisp"
-DOCKER_REGISTRY=localhost:5000/
+DOCKER_REGISTRY=""
+CONTAINERS?=""
+DOCKER_COMPOSE_ARGS?=""
+PULL_IMAGES?=false
 
 .init:
 	@$(call msg,"Initializing ...");
@@ -89,48 +93,59 @@ endif
 		fi;
 	@touch $@
 
+## build: Build OISP images locally.
+## You can specify a version using the $DOCKER_TAG argument.
+## $CONTAINERS arg (as whitespace separated list) specifies which containers should be built,
+## if it is left blank, all containers will be built.
+## This command also accepts the argument $DOCKER_COMPOSE_ARGS, which is passed directly to compose.
+##
 build: .init
-	@$(call msg,"Building IoT connector ...");
-	@./docker.sh create
-
-build-force: .init
-	@$(call msg,"Building IoT connector ...");
-	@./docker.sh create --force-recreate
-
-build-quick: .init
-	@$(call msg,"Building IoT connector by pulling images...");
-	@./docker.sh -f docker-compose-quick.yml create
-
-ifeq (start,$(firstword $(MAKECMDGOALS)))
-	CMD_ARGS := $(wordlist 2,$(words $(MAKECMDGOALS)),$(MAKECMDGOALS))
-	$(eval $(CMD_ARGS):;@:)
-endif
-
-start: build
-	@$(call msg,"Starting IoT connector ...");
-	@./docker.sh up -d $(CMD_ARGS)
+	@$(call msg,"Building OISP containers");
+	@./docker.sh build $(DOCKER_COMPOSE_ARGS) $(CONTAINERS)
+
+## pull: Pull OISP containers from Docker Hub. Requires docker login.
+## You can specify a version using the $DOCKER_TAG argument.
+## $CONTAINERS arg (as whitespace separated list) specifies which containers should be pulled,
+## if it is left blank, all containers will be pulled.
+## This command also accepts the argument $DOCKER_COMPOSE_ARGS, which is passed directly to compose.
+##
+pull: .init
+	@$(call msg, "Pulling OISP containers");
+	@./docker.sh pull $(DOCKER_COMPOSE_ARGS) $(CONTAINERS)
+
+## start: Start OISP.
+## You can specify a version using the $DOCKER_TAG argument.
+## If the images are not present, this will try to pull or build them, depending on the
+## value of $PULL_IMAGES (true/false; defaults to false).
+## $CONTAINERS arg (as whitespace separated list) specifies which containers should be started,
+## if it is left blank, all containers will be started.
+## This command also accepts the argument $DOCKER_COMPOSE_ARGS, which is passed directly to compose.
+## +start: + @$(call msg,"Starting OISP"); + @if [ "${PULL_IMAGES}" = "true" ]; then make pull; fi; + @./docker.sh up -d $(DOCKER_COMPOSE_ARGS) $(CONTAINERS) start-test: export TEST := "1" -start-test: build - @$(call msg,"Starting IoT connector (test mode) ..."); +start-test: + @$(call msg,"Starting OISP (test mode)"); @make -C tests email-account $(shell pwd)/tests/.env @source ./tests/.env && ./docker.sh up -d -start-quick: build-quick - @$(call msg,"Starting IoT connector using pulled images..."); - @./docker.sh -f docker-compose-quick.yml up -d $(CMD_ARGS) - -ifeq (stop,$(firstword $(MAKECMDGOALS))) - CMD_ARGS := $(wordlist 2,$(words $(MAKECMDGOALS)),$(MAKECMDGOALS)) - $(eval $(CMD_ARGS):;@:) -endif - +## stop: Stop running OISP containers. +## $CONTAINERS arg (as whitespace seperated list) specifies which containers should be stopped, +## if it is left blank, all containers will be stopped. +## This command also accepts the argument $DOCKER_COMPOSE_ARGS, which is passed directly to compose. +## stop: - @$(call msg,"Stopping IoT connector ..."); - @./docker.sh stop $(CMD_ARGS) + @$(call msg,"Stopping OISP containers"); + @./docker.sh stop $(DOCKER_COMPOSE_ARGS) $(CONTAINERS) +## update: Update all subrepositories to latest origin/develop +## For competabilty, this will also backup and remove setup-environment.sh +## update: distclean - @$(call msg,"Git Update (dev only) ..."); + @$(call msg,"Git Update (dev only)"); @git pull @if [ -f setup-environment.sh ]; then \ mv setup-environment.sh config-backup/setup-environment-$$(date +%Y-%m-%d-%H%M%S).sh.bak; \ @@ -140,49 +155,47 @@ update: distclean @git submodule foreach git fetch origin @git submodule foreach git checkout origin/develop +## docker-clean: Remove all docker images and containers. +## This also includes non-oisp containers. +## docker-clean: - @$(call msg,"Removing docker images and containers ..."); - @make remove - - -ifeq (test,$(firstword $(MAKECMDGOALS))) - NB_TESTS := $(wordlist 2,$(words $(MAKECMDGOALS)),$(MAKECMDGOALS)) -ifeq ($(NB_TESTS),) - NB_TESTS := 1 -endif - $(eval $(NB_TESTS):;@:) -endif - + @$(call msg,"Removing all docker images and containers ..."); + @read -r -p "Continue? [y/N]: " response; \ + case $$response in \ + [Yy]* ) ./docker.sh stop $(docker ps -a -q); \ + ./docker.sh rm -f $(docker ps -a -q); \ + /bin/bash -c "docker images -q | xargs -n 1 -I {} docker rmi -f {}";; \ + [Nn]* ) echo "Not removing containers";; \ + esac \ + +## test: Run OISP E2E tests. +## test: export TEST := "1" -test: - @for ((i=0; i < ${NB_TESTS}; i++)) do \ - cd $(CURRENT_DIR) && \ - make start-test && \ - source ./tests/.env && cd tests && make test; \ - done - - -ifeq (remove,$(firstword $(MAKECMDGOALS))) - CMD_ARGS := $(wordlist 2,$(words $(MAKECMDGOALS)),$(MAKECMDGOALS)) - $(eval $(CMD_ARGS):;@:) -endif +test: .init + make start-test && source ./tests/.env && cd tests && make test; +## remove: Remove all OISP images from local machine. +## $CONTAINERS arg (as whitespace seperated list) specifies which containers should be removed, +## if it is left blank, all containers will removed. 
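The test target wraps the steps below; when debugging a failing run it can help to execute them by hand from the repository root (a sketch of the same flow, not an additional interface):

    make start-test          # writes tests/.env via the ethereal helper and starts the stack in test mode
    source ./tests/.env      # exports the SMTP_*/IMAP_* credentials used by the e2e suite
    cd tests && make test    # runs the e2e suite against the running stack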
+## remove: - @$(call msg,"Removing $(CMD_ARGS) ..."); -ifeq ($(CMD_ARGS),) - @./docker.sh stop $(docker ps -a -q); - @./docker.sh rm -f $(docker ps -a -q); - @/bin/bash -c "docker images -q | xargs -n 1 -I {} docker rmi -f {}" - + @$(call msg,"Removing $(CONTAINERS)") +ifeq ($(CONTAINERS),"") + make stop;\ + $(eval images:=$(shell docker images --filter label=oisp -q)) \ + docker rmi -f $(images) else - @$(foreach container,$(CMD_ARGS), \ + @$(foreach container,$(CONTAINERS), \ + echo $container; \ ./docker.sh stop $(container); \ ./docker.sh rm -f -v $(container); \ docker images | grep "^.*$(container)" | awk '{print $$3}' | xargs -n 1 -I {} docker rmi -f {}; \ ) - endif - +endif +## logs: Create a .zip archive containing logs from all containers. +## The result will be saved in platform-lancuher-logs_{data}.zip +## logs: $(eval LOGS_ARCHIVE := platform-launcher-logs_$(shell date +'%Y-%m-%d_%H-%M-%S')) @$(call msg,"Making one archive file with all the containers logs within ( ./$(LOGS_ARCHIVE).zip ) ..."); @@ -191,6 +204,9 @@ logs: @cd /tmp && zip -q -r ./$(LOGS_ARCHIVE).zip $(LOGS_ARCHIVE)/*.logs @mv /tmp/$(LOGS_ARCHIVE).zip . + +## clean: Remove .init, forcing a clean-up on the next run. +## clean: @$(call msg,"Cleaning ..."); @rm -f .init @@ -203,13 +219,19 @@ distclean: clean sudo rm -rf ./data; \ fi -push-docker-images: - source setup-environment.sh && \ - for img in $$(docker images | grep -oEi $$COMPOSE_PROJECT_NAME"_(\S*)" | cut -d _ -f 2); do \ - docker tag $${COMPOSE_PROJECT_NAME}_$${img} ${DOCKER_REGISTRY}$${COMPOSE_PROJECT_NAME}/$${img}; \ - docker push ${DOCKER_REGISTRY}$${COMPOSE_PROJECT_NAME}/$${img}; \ - done - +## push images: Push containers to dockerhub. +## This obviously requires docker login, +## $CONTAINERS arg selects which containers (whitespace seperated list) should be pushed, +## if left empty, all containers will be pushed. +## +push-images: + @$(call msg,"Pushing docker images to registry"); + @./docker.sh push $(CONTAINERS) + +## help: Show this help message +## +help: + @grep "^##" Makefile | cut -c4- #---------------------------------------------------------------------------------------------------------------------- # helper functions diff --git a/docker-compose-quick.yml b/docker-compose-quick.yml deleted file mode 100644 index 7f56c756..00000000 --- a/docker-compose-quick.yml +++ /dev/null @@ -1,132 +0,0 @@ -# -# Copyright (c) 2017 Intel Corporation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
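A few illustrative invocations of the housekeeping targets (container names are examples):

    make help                                    # prints every "##" comment line from the Makefile
    make logs                                    # archives all container logs into platform-launcher-logs_<timestamp>.zip
    make remove CONTAINERS="frontend backend"    # stop and delete only the named containers and their images
    make push-images                             # push locally built images to the registry (needs docker login)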
- -version: '3' - -services: - hbase: - image: "${DOCKER_REGISTRY}${COMPOSE_PROJECT_NAME}/hbase" - ports: - - 2181:2181 - - 9090:9090 - - 60000:60000 - - 60020:60020 - volumes: - - ./data/hbase-logs:/opt/hbase/logs - postgres: - image: "${DOCKER_REGISTRY}${COMPOSE_PROJECT_NAME}/postgres" - volumes: - - ./data/postgres:/var/lib/postgresql/data - ports: - - 5432:5432 - environment: - - POSTGRES_DB=${POSTGRES_DB} - - POSTGRES_USER=${POSTGRES_USERNAME} - - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} - kafka: - image: "${DOCKER_REGISTRY}${COMPOSE_PROJECT_NAME}/kafka" - ports: - - 9092:9092 - environment: - - ZK_CONNECT=hbase:2181 - depends_on: - - hbase - environment: - - ADVERTISED_HOST=${HOST_IP_ADDRESS} - - ADVERTISED_PORT=9092 - - AUTO_CREATE_TOPICS=true - gearpump: - image: "${DOCKER_REGISTRY}${COMPOSE_PROJECT_NAME}/gearpump" - depends_on: - - nginx - - hbase - - kafka - - frontend - working_dir: /app - environment: - - VCAP_SERVICES=${VCAP_SERVICES} - - VCAP_APPLICATION=${VCAP_APPLICATION} - - KAFKA=${KAFKA} - - KAFKA_HEARTBEAT_TOPIC=${KAFKA_HEARTBEAT_TOPIC} - - GEARPUMP=${GEARPUMP} - ports: - - 8090:8090 - command: bash bootstrap.sh - redis: - image: redis:3.0 - volumes: - - ./data/redis:/data - ports: - - 6379:6379 - nginx: - image: "${DOCKER_REGISTRY}${COMPOSE_PROJECT_NAME}/nginx" - ports: - - 80:80 - - 443:443 - depends_on: - - websocket-server - - frontend - volumes: - - ./data/keys/ssl:/etc/ssl - - ./oisp-frontend/public-interface:/app - websocket-server: - image: "${DOCKER_REGISTRY}${COMPOSE_PROJECT_NAME}/websocket-server" - ports: - - 5000:5000 - depends_on: - - postgres - - kafka - volumes: - - ./oisp-websocket-server:/app - - ./data/keys:/app/keys - working_dir: /app - environment: - - VCAP_SERVICES=${VCAP_SERVICES} - - VCAP_APPLICATION=${VCAP_APPLICATION} - - TEST=${TEST} - - NODE_ENV=local - command: ./wait-for-it.sh postgres:5432 -t 300 -- ./wait-for-it.sh kafka:9092 -t 300 -- ./scripts/docker-start.sh - frontend: - image: "${DOCKER_REGISTRY}${COMPOSE_PROJECT_NAME}/frontend" - ports: - - 4001:4001 - - depends_on: - - postgres - - redis - - websocket-server - - backend - - kafka - command: ./wait-for-it.sh postgres:5432 -t 300 -- ./wait-for-it.sh redis:6379 -t 300 -- ./wait-for-it.sh kafka:9092 -t 300 -- ./scripts/wait-for-heartbeat.sh backend websocket-server -- ./scripts/docker-start.sh - volumes: - - ./oisp-frontend/public-interface:/app - - ./data/keys:/app/keys - environment: - - VCAP_SERVICES=${VCAP_SERVICES} - - VCAP_APPLICATION=${VCAP_APPLICATION} - - TEST=${TEST} - - NODE_ENV=local - backend: - image: "${DOCKER_REGISTRY}${COMPOSE_PROJECT_NAME}/backend" - ports: - - 8080:8080 - depends_on: - - hbase - - kafka - command: ./wait-for-it.sh hbase:2181 -t 300 -- ./wait-for-it.sh kafka:9092 -t 300 -- make runjar - working_dir: /app - environment: - - VCAP_SERVICES=${VCAP_SERVICES} - - VCAP_APPLICATION=${VCAP_APPLICATION} diff --git a/docker-compose.yml b/docker-compose.yml index 62aed88c..481e45ca 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -13,12 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -version: '3' +version: '3.7' services: hbase: + image: oisp/hbase:${DOCKER_TAG} build: context: ./docker-hbase/ + labels: + - oisp=true + - oisp.git_commit=${GIT_COMMIT_PLATFORM_LAUNCHER} ports: - 2181:2181 - 9090:9090 @@ -27,9 +31,13 @@ services: volumes: - ./data/hbase-logs:/opt/hbase/logs postgres: + image: oisp/postgres:${DOCKER_TAG} build: context: . 
dockerfile: docker-postgres/Dockerfile + labels: + - oisp=true + - oisp.git_commit=${GIT_COMMIT_PLATFORM_LAUNCHER} volumes: - ./data/postgres:/var/lib/postgresql/data ports: @@ -39,8 +47,12 @@ services: - POSTGRES_USER=${POSTGRES_USERNAME} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} kafka: + image: oisp/kafka:${DOCKER_TAG} build: context: ./docker-kafka/ + labels: + - oisp=true + - oisp.git_commit=${GIT_COMMIT_PLATFORM_LAUNCHER} ports: - 9092:9092 environment: @@ -59,9 +71,13 @@ services: ports: - 6379:6379 gearpump: + image: oisp/gearpump:${DOCKER_TAG} build: context: ./oisp-gearpump-rule-engine dockerfile: Dockerfile + labels: + - oisp=true + - oisp.git_commit=${GIT_COMMIT_GEARPUMP} depends_on: - nginx - hbase @@ -78,11 +94,15 @@ services: - 8090:8090 command: bash wait-for-it.sh kafka:9092 -t 300 -- bash bootstrap.sh nginx: + image: oisp/nginx:${DOCKER_TAG} build: context: ./oisp-frontend/public-interface/nginx args: - WEBSOCKET_SERVER=websocket-server:5000 - DASHBOARD_SERVER=frontend:4001 + labels: + - oisp=true + - oisp.git_commit=${GIT_COMMIT_FRONTEND} ports: - 80:80 - 443:443 @@ -92,8 +112,12 @@ services: volumes: - ./data/keys/ssl:/etc/ssl websocket-server: + image: oisp/websocket-server:${DOCKER_TAG} build: context: ./oisp-websocket-server + labels: + - oisp=true + - oisp.git_commit=${GIT_COMMIT_WEBSOCKET_SERVER} ports: - 5000:5000 depends_on: @@ -109,11 +133,14 @@ services: - NODE_ENV=local command: ./wait-for-it.sh postgres:5432 -t 300 -- ./wait-for-it.sh kafka:9092 -t 300 -- ./scripts/docker-start.sh frontend: + image: oisp/frontend:${DOCKER_TAG} build: context: ./oisp-frontend/public-interface + labels: + - oisp=true + - ois.git_commit=${GIT_COMMIT_FRONTEND} ports: - 4001:4001 - depends_on: - postgres - redis @@ -129,8 +156,12 @@ services: - TEST=${TEST} - NODE_ENV=local backend: + image: oisp/backend:${DOCKER_TAG} build: context: ./oisp-backend + labels: + - oisp=true + - oisp.git_commit=${GIT_COMMIT_BACKEND} ports: - 8080:8080 depends_on: diff --git a/docker-postgres/Dockerfile b/docker-postgres/Dockerfile index fa40c49c..8fa7c877 100644 --- a/docker-postgres/Dockerfile +++ b/docker-postgres/Dockerfile @@ -1,4 +1,5 @@ FROM postgres:9.4-alpine COPY ./oisp-frontend/public-interface/deploy/postgres/base/*.sql /docker-entrypoint-initdb.d/ -COPY ./oisp-frontend/public-interface/scripts/database/create_test_database.sh /docker-entrypoint-initdb.d/ \ No newline at end of file +COPY ./oisp-frontend/public-interface/scripts/database/create_test_database.sh /docker-entrypoint-initdb.d/ + diff --git a/docker.sh b/docker.sh index cc62fd02..3180446f 100755 --- a/docker.sh +++ b/docker.sh @@ -17,6 +17,12 @@ # Set env for application . 
./setup-environment.sh +export GIT_COMMIT_PLATFORM_LAUNCHER=$(git rev-parse HEAD) +export GIT_COMMIT_FRONTEND=$(git -C oisp-frontend rev-parse HEAD) +export GIT_COMMIT_GEARPUMP=$(git -C oisp-gearpump-rule-engine rev-parse HEAD) +export GIT_COMMIT_WEBSOCKET_SERVER=$(git -C oisp-websocket-server rev-parse HEAD) +export GIT_COMMIT_BACKEND=$(git -C oisp-backend rev-parse HEAD) + redsocks_container_name='redsocks' if [ -n "$http_proxy" ] && [ -n "$https_proxy" ] && [ "$1" == "up" ]; then diff --git a/platform-setup/setup-ubuntu16.04.sh b/platform-setup/setup-ubuntu16.04.sh index 9f18dbf2..cd2305e4 100755 --- a/platform-setup/setup-ubuntu16.04.sh +++ b/platform-setup/setup-ubuntu16.04.sh @@ -19,7 +19,7 @@ sudo apt-get update sudo apt-get install docker-ce #install docker-compose -sudo curl -L https://github.com/docker/compose/releases/download/1.14.0/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose +sudo curl -L https://github.com/docker/compose/releases/download/1.22.0/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose sudo chmod +x /usr/local/bin/docker-compose #other packages needed diff --git a/setup-environment.example.sh b/setup-environment.example.sh index ff74729a..ae4bb3b4 100755 --- a/setup-environment.example.sh +++ b/setup-environment.example.sh @@ -14,6 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +export DOCKER_TAG=${DOCKER_TAG:-'latest'} + export ZOOKEEPER_KAFKA='kafka' export ZOOKEEPER_KAFKA_PORT='2181' export ZOOKEEPER_HBASE='hbase' From fca79c806e858ef7fe727360bb6c85abc3b09339 Mon Sep 17 00:00:00 2001 From: ogz Date: Fri, 7 Sep 2018 14:46:17 +0300 Subject: [PATCH 23/68] Add invitation test: Do not accept a non existing invitation --- tests/oisp-tests.js | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index 362edb50..ad8da237 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -1182,6 +1182,17 @@ describe("Invite receiver ...\n".bold, function() { } }) }) + + it('Shall not accept non-existing invitations, or crash', function(done){ + inviteId = 0; + helpers.invitation.acceptInvitation(receiverToken, accountId, inviteId, function(err, response) { + if (err) { + done(); + } else { + done(new Error('non-existing invitation accepted' + response)); + } + }) + }) it('Shall request activation', function(done) { var username = process.env.USERNAME; From 27e92b18f089d3382120aa6ce3cfc8bb314b1f8e Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Fri, 7 Sep 2018 12:04:52 +0000 Subject: [PATCH 24/68] E2E test: Added Kafka readiness tests. In some cases the E2E was not started due to Kafka not being ready. --- tests/Makefile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/Makefile b/tests/Makefile index b6f8ef83..fd3073d6 100644 --- a/tests/Makefile +++ b/tests/Makefile @@ -35,9 +35,10 @@ npm-install: @cd $(CURRENT_DIR) && npm install > /dev/null test: npm-install - @$(call msg,"Waiting for frontend"); + @$(call msg,"Waiting for frontend and test Kafka readiness"); # fill in separator to avoid accidentially dashboard being most recent heartbeat. # Then look always for latest heartbeat + @until echo new-test | kafkacat -P -b localhost:9092 -t heartbeat ; do echo "Kafka not ready. Waiting ..."; sleep 1; done @ (for i in {1..4}; do echo new-test ; done) | kafkacat -P -b localhost:9092 -t heartbeat; @. 
../setup-environment.sh && until kafkacat -b localhost:9092 -t heartbeat -e -o -4 2> /dev/null | grep -m 1 "dashboard"; do sleep 1 ; done From fe1121b43c384699b139314434150e7a307e11a9 Mon Sep 17 00:00:00 2001 From: ogz Date: Wed, 19 Sep 2018 17:29:58 +0300 Subject: [PATCH 25/68] Add .init to make start --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index ef66c439..489f043d 100644 --- a/Makefile +++ b/Makefile @@ -121,7 +121,7 @@ pull: .init ## if it is left blank, all containers will be started. ## This command also accepts the argument $DOCKER_COMPOSE_ARGS, which is passed directly to compose. ## -start: +start: .init @$(call msg,"Starting OISP"); @if [ "${PULL_IMAGES}" = "true" ]; then make pull; fi; @./docker.sh up -d $(DOCKER_COMPOSE_ARGS) $(CONTAINERS) From c28e8a9f4fba6741af9060114177ca78a1e72c2b Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Sun, 9 Sep 2018 18:42:09 +0200 Subject: [PATCH 26/68] E2E Tests: Added Time-based rule tests -Observation test now with timing, e.g. send one observation per second -Number of actuations is checked before resolving tests to also count unwanted actuations -Added helper to create TB-rules --- tests/lib/helpers/rules.js | 82 ++++++- tests/oisp-tests.js | 20 ++ tests/subtests/promise-wrap.js | 23 +- tests/subtests/statistic-rule-tests.js | 8 + tests/subtests/timebased-rule-tests.js | 321 +++++++++++++++++++++++++ 5 files changed, 444 insertions(+), 10 deletions(-) create mode 100644 tests/subtests/timebased-rule-tests.js diff --git a/tests/lib/helpers/rules.js b/tests/lib/helpers/rules.js index da9ec315..40333508 100644 --- a/tests/lib/helpers/rules.js +++ b/tests/lib/helpers/rules.js @@ -46,7 +46,7 @@ function getRules(userToken, accountId, cb) { cb(null,response) } }); -} +} function getRuleDetails(userToken, accountId, ruleId, cb) { if (!cb) { @@ -72,7 +72,7 @@ function getRuleDetails(userToken, accountId, ruleId, cb) { } } }); -} +} function createRule(ruleConfig, userToken, accountId, deviceId, cb) { if (!cb) { @@ -205,17 +205,84 @@ function createStatisticRule(ruleConfig, userToken, accountId, deviceId, cb) { } }) }, 500) - + } }); } + +function createTbRule(ruleConfig, userToken, accountId, deviceId, cb) { + if (!cb) { + throw "Callback required"; + } + + var data = { + userToken: userToken, + accountId: accountId, + + body: { + name: ruleConfig.name, + description: "TB OISP testing rule", + priority: "Medium", + type: "Regular", + status: "Active", + resetType: "Manual", + synchronizationStatus: "NotSync", + + actions: ruleConfig.actions, + + population: { + ids: [deviceId], + attributes: null, + tags: null + }, + + conditions: { + operator: "OR", + values: [{ + component: { + dataType: "Number", + name: ruleConfig.tbComponent, + cid: ruleConfig.cid + }, + type: "time", + values: [ruleConfig.tbConditionValue.toString()], + operator: ruleConfig.tbConditionOperator, + timeLimit: ruleConfig.tbTimelimit, + }] + } + } + }; + + api.rules.createRule(data, function(err, response) { + if (err) { + cb(err) + } else { + var ruleId = response.id; + var syncInterval = setInterval( function(id) { + getRuleDetails(userToken, accountId, ruleId, function(err, status) { + if (err) { + clearInterval(syncInterval); + cb(err) + } + else { + if ( status == true ) { + clearInterval(syncInterval); + cb(null, ruleId) + } + } + }) + }, 500) + } + }); +} + function updateRule(ruleConfig, userToken, accountId, ruleId, cb) { if (!cb) { throw "Callback required"; } - + var data = { userToken: userToken, accountId: 
accountId, @@ -267,7 +334,7 @@ function updateRule(ruleConfig, userToken, accountId, ruleId, cb) { cb(null,response) } }); -} +} function updateRuleStatus(userToken, accountId, ruleId, status, cb) { @@ -281,7 +348,7 @@ function updateRuleStatus(userToken, accountId, ruleId, status, cb) { ruleId: ruleId, body:{ status:status - } + } }; api.rules.updateRuleStatus(data, function(err, response) { @@ -310,7 +377,7 @@ function createDraftRule (draftname, userToken, accountId, cb) { resetType: null, name: draftname, synchronizationStatus: "NotSync", - + actions:[{ type:"mail", target:[] @@ -405,6 +472,7 @@ function deleteRule (userToken, accountId,ruleId , cb) { module.exports={ createRule:createRule, createStatisticRule:createStatisticRule, + createTbRule:createTbRule, getRules: getRules, getRuleDetails: getRuleDetails, updateRule: updateRule, diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index ad8da237..dc756ca5 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -871,6 +871,24 @@ describe("Sending observations and checking rules ...\n".bold, function() { }); + +describe("Do time based rule subtests ...".bold, + function() { + var test; + var descriptions = require("./subtests/timebased-rule-tests").descriptions; + it(descriptions.createTbRules,function(done) { + test = require("./subtests/timebased-rule-tests").test(userToken, accountId, deviceId, deviceToken, cbManager); + test.createTbRules(done); + }).timeout(10000); + it(descriptions.sendObservations,function(done) { + test.sendObservations(done); + }).timeout(60000); + it(descriptions.cleanup,function(done) { + test.cleanup(done); + }).timeout(10000); + }); + + describe("Do statistics rule subtests ...".bold, function() { var test; @@ -888,6 +906,7 @@ describe("Do statistics rule subtests ...".bold, }); + describe("Geting and manage alerts ... \n".bold, function(){ it('Shall get list of alerts', function(done){ @@ -1371,3 +1390,4 @@ describe("change password and delete receiver ... 
\n".bold, function(){ }) }) + diff --git a/tests/subtests/promise-wrap.js b/tests/subtests/promise-wrap.js index 60c5d8da..d4b3b352 100644 --- a/tests/subtests/promise-wrap.js +++ b/tests/subtests/promise-wrap.js @@ -23,7 +23,7 @@ var helpers = require("../lib/helpers"); -var checkObservations = function(tempValues, cid, cbManager, deviceToken, accountId, deviceId, componentParamName){ +var checkObservations = function(tempValues, cid, cbManager, deviceToken, accountId, deviceId, componentParamName, waitBetweenSendingSamples=0){ var firstObservationTime; return new Promise((resolve, reject) => { var index = 0; @@ -51,7 +51,7 @@ var checkObservations = function(tempValues, cid, cbManager, deviceToken, accoun } if (tempValues[index].expectedActuation === null) { - step(); + setTimeout(step, waitBetweenSendingSamples); } }); }; @@ -59,7 +59,10 @@ var checkObservations = function(tempValues, cid, cbManager, deviceToken, accoun index++; if (index === tempValues.length) { process.stdout.write("\n"); - resolve(); + if (nbActuations === 0){ + resolve(); + } + else reject(new Error("Wrong number of actuations")); } else { sendObservationAndCheckRules(index); } @@ -148,6 +151,19 @@ var createStatisticRule = (rule, userToken, accountId, deviceId) => { }); }; +var createTbRule = (rule, userToken, accountId, deviceId) => { + return new Promise(function(resolve, reject){ + helpers.rules.createTbRule(rule, userToken, accountId, deviceId, function(err, id) { + if (err) { + reject(err); + } else { + rule.id = id; + resolve(); + } + }); + }); +}; + var deleteComponent = function(userToken, accountId, deviceId, componentId){ return new Promise((resolve, reject) => { helpers.devices.deleteDeviceComponent (userToken, accountId, deviceId, componentId, function(err, response){ @@ -177,6 +193,7 @@ module.exports = { addActuator: addActuator, createCommand: createCommand, createStatisticRule: createStatisticRule, + createTbRule: createTbRule, deleteComponent: deleteComponent, deleteRule: deleteRule }; diff --git a/tests/subtests/statistic-rule-tests.js b/tests/subtests/statistic-rule-tests.js index 57d4e986..03c4f5cd 100644 --- a/tests/subtests/statistic-rule-tests.js +++ b/tests/subtests/statistic-rule-tests.js @@ -112,6 +112,14 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { { value: 10.0, expectedActuation: 0 //switch off + }, + { + value: 17.0, + expectedActuation: null + }, + { + value: 17.0, + expectedActuation: null } ]; diff --git a/tests/subtests/timebased-rule-tests.js b/tests/subtests/timebased-rule-tests.js new file mode 100644 index 00000000..87c2386f --- /dev/null +++ b/tests/subtests/timebased-rule-tests.js @@ -0,0 +1,321 @@ +/** + * Copyright (c) 2018 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +/*jshint esversion: 6 */ +/*jshint undef: true, unused: true */ +/* jshint node: true */ + +"use strict"; + +var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { + var chai = require('chai'); + var assert = chai.assert; + var helpers = require("../lib/helpers"); + var componentName = "temperature-sensor-tbrt"; + var componentType = "temperature.v1.0"; + var actuatorName = "powerswitch-actuator-tbrt"; + var actuatorType = "powerswitch.v1.0"; + var switchOnCmdName = "switch-on-tbrt"; + var switchOffCmdName = "switch-off-tbrt"; + var promtests = require('./promise-wrap'); + var rules = []; + var componentId; + var actuatorId; + var componentParamName = "LED"; + + rules[switchOnCmdName] = { + name: "oisp-tests-rule-tb-gt20-30s", + tbComponent: componentName, + tbConditionOperator: ">=", + tbConditionValue: "20", //>=20 temp + tbTimelimit: 30, //wait 30s + actions: [ + { + type: "actuation", + target: [ switchOnCmdName ] + } + ], + }; + + rules[switchOffCmdName] = { + name: "oisp-tests-rule-tb-lower20-30s", + tbComponent: componentName, + tbConditionOperator: "<", + tbConditionValue: "20", //temp < 20 + tbTimelimit: 10, + actions: [ + { + type: "actuation", + target: [ switchOffCmdName ] + } + ], + }; + var temperatureValues = [ + { + value: 20, + expectedActuation: null + }, + { + value: 21, + expectedActuation: null + }, + { + value: 22, + expectedActuation: null + }, + { + value: 23, + expectedActuation: null + }, + { + value: 24, + expectedActuation: null + }, + { + value: 25, + expectedActuation: null + }, + { + value: 26, + expectedActuation: null + }, + { + value: 27, + expectedActuation: null + }, + { + value: 28, + expectedActuation: null + }, + { + value: 29, + expectedActuation: null + }, + { + value: 30, + expectedActuation: null + }, + { + value: 31, + expectedActuation: null + }, + { + value: 32, + expectedActuation: null + }, + { + value: 33, + expectedActuation: null + }, + { + value: 34, + expectedActuation: null + }, + { + value: 35, + expectedActuation: null + }, + { + value: 36, + expectedActuation: null + }, + { + value: 37, + expectedActuation: null + }, + { + value: 38, + expectedActuation: null + }, + { + value: 39, + expectedActuation: null + }, + { + value: 40, + expectedActuation: null + }, + { + value: 41, + expectedActuation: null + }, + { + value: 42, + expectedActuation: null + }, + { + value: 43, + expectedActuation: null + }, + { + value: 44, + expectedActuation: null + }, + { + value: 45, + expectedActuation: null + }, + { + value: 46, + expectedActuation: null + }, + { + value: 47, + expectedActuation: null + }, + { + value: 48, + expectedActuation: null + }, + { + value: 49, + expectedActuation: 1 + }, + { + value: 50, + expectedActuation: 1 + }, + { + value: 51, + expectedActuation: 1 + }, + { + value: 19, + expectedActuation: null + }, + { + value: 19, + expectedActuation: null + }, + { + value: 19, + expectedActuation: null + }, + { + value: 19, + expectedActuation: null + }, + { + value: 19, + expectedActuation: null + }, + { + value: 19, + expectedActuation: null + }, + { + value: 19, + expectedActuation: null + }, + { + value: 19, + expectedActuation: null + }, + { + value: 19, + expectedActuation: null + }, + { + value: 19, + expectedActuation: null + }, + { + value: 19, + expectedActuation: 0 + }, + { + value: 20, + expectedActuation: null + }, + { + value: 20, + expectedActuation: null + }, + ]; + + + //********************* Main Object *****************// + //---------------------------------------------------// + 
return { + "createTbRules": function(done) { + //To be independent of main tests, own sensors, actuators, and commands have to be created + promtests.addComponent(componentName, componentType, deviceToken, accountId, deviceId) + .then((id) => {componentId = id; rules[switchOffCmdName].cid = componentId; rules[switchOnCmdName].cid = componentId;}) + .then(() => promtests.addActuator(actuatorName, actuatorType, deviceToken, accountId, deviceId)) + .then((id) => {actuatorId = id;}) + .then(() => promtests.createCommand(switchOffCmdName, componentParamName, 0, userToken, accountId, deviceId, actuatorId)) + .then(() => promtests.createCommand(switchOnCmdName, componentParamName, 1, userToken, accountId, deviceId, actuatorId)) + .then(() => promtests.createTbRule(rules[switchOffCmdName], userToken, accountId, deviceId)) + .then(() => promtests.createTbRule(rules[switchOnCmdName], userToken, accountId, deviceId)) + .then(() => {done();}) + .catch((err) => {done(err);}); + }, + "sendObservations": function(done){ + assert.notEqual(componentId, null, "CommponentId not defined"); + assert.notEqual(rules[switchOnCmdName].id, null, "Rule not defined"); + assert.notEqual(rules[switchOffCmdName].id, null, "Rule not defined"); + assert.notEqual(cbManager, null, "cbManager proxy not defined"); + assert.notEqual(deviceToken, null, "Device Token not defined"); + assert.notEqual(deviceId, null, "DeviceId not defined"); + + promtests.checkObservations(temperatureValues, rules[switchOnCmdName].cid, cbManager, deviceToken, accountId, deviceId, componentParamName, 1000) + .then(() => {done();}) + .catch((err) => { done(err);}); + }, + "cleanup": function(done){ + var getListOfAlerts = function(userToken, accountId){ + return new Promise((resolve, reject) => { + helpers.alerts.getListOfAlerts(userToken, accountId, function(err, response) { + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }); + }; + //delete 2 most recent alerts + //delete new components + promtests.deleteComponent(userToken, accountId, deviceId, componentId) + .then(() => promtests.deleteComponent(userToken, accountId, deviceId, actuatorId)) + //delete new commands + .then(() => helpers.control.deleteComplexCommand(switchOnCmdName, userToken, accountId)) + .then(() => helpers.control.deleteComplexCommand(switchOffCmdName, userToken, accountId)) + //delete Statistic Rules + .then(() => promtests.deleteRule(userToken, accountId, rules[switchOnCmdName].id)) + .then(() => promtests.deleteRule(userToken, accountId, rules[switchOffCmdName].id)) + .then(() => getListOfAlerts(userToken, accountId)) + .then((alerts) => { + var prm1 = helpers.alerts.deleteAlert(userToken, accountId, alerts[0].alertId); + var prm2 = helpers.alerts.deleteAlert(userToken, accountId, alerts[1].alertId); + var prm3 = helpers.alerts.deleteAlert(userToken, accountId, alerts[2].alertId); + return Promise.all([prm1, prm2, prm3]); + }) + .then(() => {done();}) + .catch((err) => {done(err);}); + } + }; +}; + +var descriptions = { + "createTbRules": "Shall create time based rules and wait for synchronization with RE", + "sendObservations": "Shall send observations and trigger event for statistics rules", + "cleanup": "Cleanup components, commands, rules created for subtest" +}; + +module.exports = { + test: test, + descriptions: descriptions +}; From 379ff8ad1e3a51d5d4ad085aa1e9cfb2cc54c3c6 Mon Sep 17 00:00:00 2001 From: ogz Date: Wed, 12 Sep 2018 14:21:58 +0300 Subject: [PATCH 27/68] Add component test: Do not create a component with a name that already exists 
on the device --- tests/oisp-tests.js | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index dc756ca5..babf4ad9 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -543,6 +543,20 @@ describe("Creating and getting components ... \n".bold, function() { } }) }).timeout(10000); + + it('Shall not add device a component with the name that a component of the device already has, or crash', function(done) { + + helpers.devices.addDeviceComponent(componentName, componentType, deviceToken, accountId, deviceId, function(err, id) { + if (err) { + done(new Error("Cannot create or try to create component: " + err)); + } else if (id === undefined) { + // Response with code 409, id should be undefined + done(); + } else { + done(new Error("No error is thrown and the component is added successfully: " + id)); + } + }) + }).timeout(10000); it('Shall add device an actuator', function(done) { From f269fb67c68373f20372c291f31c0b5406b0f27e Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Thu, 27 Sep 2018 08:25:04 +0000 Subject: [PATCH 28/68] Moved from ifconfig to ip in Makefile ifconfig is deprecated and replaced by ip --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 489f043d..a2a78c6d 100644 --- a/Makefile +++ b/Makefile @@ -23,7 +23,7 @@ SHELL:=/bin/bash CURRENT_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST)))) BRANCH:=$(shell git branch| grep \*| cut -d ' ' -f2) export TEST = 0 -export HOST_IP_ADDRESS=$(shell ifconfig docker0 | sed -En 's/.*inet (addr:)?(([0-9]*\.){3}[0-9]*).*/\2/p') +export HOST_IP_ADDRESS=$(shell ip -4 addr show docker0 | grep -oP '(?<=inet\s)\d+(\.\d+){3}') SSL_CERT_PATH:=data/keys/ssl COMPOSE_PROJECT_NAME?="oisp" From d6e978c179f4bd524009cc0d68ed562576ad611d Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Wed, 19 Sep 2018 11:15:10 +0200 Subject: [PATCH 29/68] Created a new container for debugging --- docker-compose.yml | 7 +++++++ docker-debugger/Dockerfile | 17 +++++++++++++++++ docker-debugger/README.md | 12 ++++++++++++ 3 files changed, 36 insertions(+) create mode 100644 docker-debugger/Dockerfile create mode 100644 docker-debugger/README.md diff --git a/docker-compose.yml b/docker-compose.yml index 481e45ca..84b84540 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -172,3 +172,10 @@ services: environment: - VCAP_SERVICES=${VCAP_SERVICES} - VCAP_APPLICATION=${VCAP_APPLICATION} + debugger: + image: oisp/debugger:${DOCKER_TAG} + build: + context: ./docker-debugger + labels: + - oisp=true + - oisp.git_commit=${GIT_COMMIT_PLATFORM_LAUNCHER} \ No newline at end of file diff --git a/docker-debugger/Dockerfile b/docker-debugger/Dockerfile new file mode 100644 index 00000000..0a336a98 --- /dev/null +++ b/docker-debugger/Dockerfile @@ -0,0 +1,17 @@ +FROM ubuntu:xenial + +RUN apt-get update +RUN apt-get -y install nano make git kafkacat python python-pip python3-pip python3-dev python3-setuptools python-setuptools build-essential nodejs dnsutils virtualenv snapd npm wget apt-transport-https curl + +# install kubectl +RUN curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - +RUN touch /etc/apt/sources.list.d/kubernetes.list +RUN echo "deb http://apt.kubernetes.io/ kubernetes-xenial main" | tee -a /etc/apt/sources.list.d/kubernetes.list +RUN apt-get update +RUN apt-get install -y kubectl + +RUN alias node=nodejs + +ENV OISP_REMOTE https://github.com/Open-IoT-Service-Platform/platform-launcher.git + +CMD ["tail", 
"-f", "/dev/null"] \ No newline at end of file diff --git a/docker-debugger/README.md b/docker-debugger/README.md new file mode 100644 index 00000000..3fde6735 --- /dev/null +++ b/docker-debugger/README.md @@ -0,0 +1,12 @@ +Debug Container +=============== + +This container has the tools that are often required to debug various parts of OISP, like kafkacat, node & npm, python & pip etc. + +Since this is not supposed to be deployed in production, lightness of this container is not a concern, so it is meant to be a all in one solution. + +Advantages over debugging via host machine +------------------------------------------ + +1. Not all host machines necessarily have the tools installed. +2. This can be used to access endpoints not exposed publicly within docker/k8s networks. From 8e5f9b69cca3d28fc3fdef17d66c8b29a97c17aa Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Tue, 25 Sep 2018 09:23:53 +0200 Subject: [PATCH 30/68] Made debugger container optional --- Makefile | 26 ++++++++++++++------- docker-compose.yml | 7 ------ docker-debugger/Dockerfile | 3 +-- docker-debugger/README.md | 10 +++++--- docker-debugger/docker-compose-debugger.yml | 9 +++++++ 5 files changed, 34 insertions(+), 21 deletions(-) create mode 100644 docker-debugger/docker-compose-debugger.yml diff --git a/Makefile b/Makefile index a2a78c6d..dfc1e171 100644 --- a/Makefile +++ b/Makefile @@ -28,9 +28,14 @@ SSL_CERT_PATH:=data/keys/ssl COMPOSE_PROJECT_NAME?="oisp" DOCKER_REGISTRY="" -CONTAINERS?="" -DOCKER_COMPOSE_ARGS?="" +CONTAINERS?=$(shell docker-compose --log-level ERROR config --services) +DOCKER_COMPOSE_ARGS?= PULL_IMAGES?=false +DEBUG?=false + +ifeq ($(DEBUG),true) +CONTAINERS:=$(CONTAINERS) debugger +endif .init: @$(call msg,"Initializing ..."); @@ -97,34 +102,37 @@ endif ## You can specify a version using the $DOCKER_TAG argument. ## $CONTAINERS arg (as whitespace seperated list) specifies which containers should be built, ## if it is left blank, all containers will be built. +## If $DEBUG is set to true, debugger will be added to CONTAINERS ## This command also accepts the argument $DOCKER_COMPOSE_ARGS, which is passed directly to compose. ## build: .init @$(call msg,"Building OISP containers"); - @./docker.sh build $(DOCKER_COMPOSE_ARGS) $(CONTAINERS) + @./docker.sh -f docker-compose.yml -f docker-debugger/docker-compose-debugger.yml build $(DOCKER_COMPOSE_ARGS) $(CONTAINERS); ## pull: Pull OISP containers from dockerhub. Requires docker login. ## You can specify a version using the $DOCKER_TAG argument. ## $CONTAINERS arg (as whitespace seperated list) specifies which containers should be pulled, ## if it is left blank, all containers will be pulled. +## If $DEBUG is set to true, debugger will be added to CONTAINERS ## This command also accepts the argument $DOCKER_COMPOSE_ARGS, which is passed directly to compose. ## pull: .init @$(call msg, "Pulling OISP containers"); - @./docker.sh pull $(DOCKER_COMPOSE_ARGS) $(CONTAINERS) + @./docker.sh -f docker-compose.yml -f docker-debugger/docker-compose-debugger.yml pull $(DOCKER_COMPOSE_ARGS) $(CONTAINERS); ## start: Start OISP. ## You can specify a version using the $DOCKER_TAG argument. ## If the images are not present, this will try to pull or build them, depending on the ## value of $PULL_IMAGES (true/false; defaults to false) -## $CONTAINERS arg (as whitespace seperated list) specifies which containers should be started, -## if it is left blank, all containers will be started. 
+## If DEBUG is set to true, debugger will be added to containers +## CONTAINERS arg (as whitespace seperated list) specifies which containers should be started, +## if it is left blank, all containers except `debugger` will be started. ## This command also accepts the argument $DOCKER_COMPOSE_ARGS, which is passed directly to compose. ## start: .init @$(call msg,"Starting OISP"); @if [ "${PULL_IMAGES}" = "true" ]; then make pull; fi; - @./docker.sh up -d $(DOCKER_COMPOSE_ARGS) $(CONTAINERS) + @./docker.sh -f docker-compose.yml -f docker-debugger/docker-compose-debugger.yml up -d $(DOCKER_COMPOSE_ARGS) $(CONTAINERS) start-test: export TEST := "1" start-test: @@ -134,12 +142,12 @@ start-test: ## stop: Stop running OISP containers. ## $CONTAINERS arg (as whitespace seperated list) specifies which containers should be stopped, -## if it is left blank, all containers will be stopped. +## if it is left blank, all containers except `debugger` will be stopped. ## This command also accepts the argument $DOCKER_COMPOSE_ARGS, which is passed directly to compose. ## stop: @$(call msg,"Stopping OISP containers"); - @./docker.sh stop $(DOCKER_COMPOSE_ARGS) $(CONTAINERS) + @./docker.sh -f docker-compose.yml -f docker-debugger/docker-compose-debugger.yml stop $(DOCKER_COMPOSE_ARGS) $(CONTAINERS) ## update: Update all subrepositories to latest origin/develop ## For competabilty, this will also backup and remove setup-environment.sh diff --git a/docker-compose.yml b/docker-compose.yml index 84b84540..481e45ca 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -172,10 +172,3 @@ services: environment: - VCAP_SERVICES=${VCAP_SERVICES} - VCAP_APPLICATION=${VCAP_APPLICATION} - debugger: - image: oisp/debugger:${DOCKER_TAG} - build: - context: ./docker-debugger - labels: - - oisp=true - - oisp.git_commit=${GIT_COMMIT_PLATFORM_LAUNCHER} \ No newline at end of file diff --git a/docker-debugger/Dockerfile b/docker-debugger/Dockerfile index 0a336a98..581c8f2b 100644 --- a/docker-debugger/Dockerfile +++ b/docker-debugger/Dockerfile @@ -1,7 +1,6 @@ FROM ubuntu:xenial -RUN apt-get update -RUN apt-get -y install nano make git kafkacat python python-pip python3-pip python3-dev python3-setuptools python-setuptools build-essential nodejs dnsutils virtualenv snapd npm wget apt-transport-https curl +RUN apt-get update && apt-get -y install nano make git kafkacat python python-pip python3-pip python3-dev python3-setuptools python-setuptools build-essential nodejs dnsutils virtualenv snapd npm wget apt-transport-https curl # install kubectl RUN curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - diff --git a/docker-debugger/README.md b/docker-debugger/README.md index 3fde6735..46faa8cd 100644 --- a/docker-debugger/README.md +++ b/docker-debugger/README.md @@ -1,12 +1,16 @@ -Debug Container +#Debug Container =============== This container has the tools that are often required to debug various parts of OISP, like kafkacat, node & npm, python & pip etc. Since this is not supposed to be deployed in production, lightness of this container is not a concern, so it is meant to be a all in one solution. -Advantages over debugging via host machine ------------------------------------------- +## Build & run + +Run the make commands from project root directory, and set DEBUG=true or add debugger to CONTAINERS. + +## Advantages over debugging via host machine + 1. Not all host machines necessarily have the tools installed. 2. 
This can be used to access endpoints not exposed publicly within docker/k8s networks. diff --git a/docker-debugger/docker-compose-debugger.yml b/docker-debugger/docker-compose-debugger.yml new file mode 100644 index 00000000..2ed6d7ff --- /dev/null +++ b/docker-debugger/docker-compose-debugger.yml @@ -0,0 +1,9 @@ +version: '3.7' +services: + debugger: + image: oisp/debugger:${DOCKER_TAG} + build: + context: docker-debugger + labels: + - oisp=true + - oisp.git_commit=${GIT_COMMIT_PLATFORM_LAUNCHER} \ No newline at end of file From e1638c81231af0927b679ca93ec129893b23c910 Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Thu, 27 Sep 2018 12:02:37 +0000 Subject: [PATCH 31/68] Minor updates to debugger container --- Makefile | 2 +- docker-debugger/Dockerfile | 9 ++++++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index dfc1e171..3bd5a08a 100644 --- a/Makefile +++ b/Makefile @@ -234,7 +234,7 @@ distclean: clean ## push-images: @$(call msg,"Pushing docker images to registry"); - @./docker.sh push $(CONTAINERS) + @./docker.sh -f docker-compose.yml -f docker-debugger/docker-compose-debugger.yml push $(CONTAINERS) ## help: Show this help message ## diff --git a/docker-debugger/Dockerfile b/docker-debugger/Dockerfile index 581c8f2b..f0a9464d 100644 --- a/docker-debugger/Dockerfile +++ b/docker-debugger/Dockerfile @@ -9,8 +9,15 @@ RUN echo "deb http://apt.kubernetes.io/ kubernetes-xenial main" | tee -a /etc/ap RUN apt-get update RUN apt-get install -y kubectl -RUN alias node=nodejs +RUN npm install --global n +RUN n 6 + +RUN ln -s /usr/bin/nodejs /usr/bin/node ENV OISP_REMOTE https://github.com/Open-IoT-Service-Platform/platform-launcher.git +RUN mkdir /home/platform-launcher + +WORKDIR /home/platform-launcher + CMD ["tail", "-f", "/dev/null"] \ No newline at end of file From a095f6e60c9ff7f27d7ed5f9e5fc9aa829ca3cde Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Fri, 28 Sep 2018 12:53:10 +0000 Subject: [PATCH 32/68] E2E test: fix for timebased rule test sometimes reporting errors --- tests/oisp-tests.js | 2 +- tests/subtests/promise-wrap.js | 37 ++++-- tests/subtests/timebased-rule-tests.js | 171 +++++-------------------- 3 files changed, 57 insertions(+), 153 deletions(-) diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index babf4ad9..d88850a3 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -896,7 +896,7 @@ describe("Do time based rule subtests ...".bold, }).timeout(10000); it(descriptions.sendObservations,function(done) { test.sendObservations(done); - }).timeout(60000); + }).timeout(120000); it(descriptions.cleanup,function(done) { test.cleanup(done); }).timeout(10000); diff --git a/tests/subtests/promise-wrap.js b/tests/subtests/promise-wrap.js index d4b3b352..630298cc 100644 --- a/tests/subtests/promise-wrap.js +++ b/tests/subtests/promise-wrap.js @@ -40,21 +40,30 @@ var checkObservations = function(tempValues, cid, cbManager, deviceToken, accoun var step; var sendObservationAndCheckRules = function(index) { process.stdout.write(".".green); - helpers.devices.submitData(tempValues[index].value, deviceToken, accountId, deviceId, cid, function(err, ts) { - tempValues[index].ts = ts; - if (index === 0) { - firstObservationTime = tempValues[index].ts; - } - if (err) { - reject(err); - } - - if (tempValues[index].expectedActuation === null) { - setTimeout(step, waitBetweenSendingSamples); - } - }); - }; + if (tempValues[index].hasOwnProperty('delay')){ + setTimeout(sendActualObservation, tempValues[index].delay); + } + else{ + 
sendActualObservation(); + } + function sendActualObservation(){ + helpers.devices.submitData(tempValues[index].value, deviceToken, accountId, deviceId, cid, function(err, ts) { + tempValues[index].ts = ts; + + if (index === 0) { + firstObservationTime = tempValues[index].ts; + } + if (err) { + reject(err); + } + + if (tempValues[index].expectedActuation === null) { + setTimeout(step, waitBetweenSendingSamples); + } + }); + } + }; step = function(){ index++; if (index === tempValues.length) { diff --git a/tests/subtests/timebased-rule-tests.js b/tests/subtests/timebased-rule-tests.js index 87c2386f..fc7212d8 100644 --- a/tests/subtests/timebased-rule-tests.js +++ b/tests/subtests/timebased-rule-tests.js @@ -65,184 +65,79 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { var temperatureValues = [ { value: 20, - expectedActuation: null + expectedActuation: null, + delay: 0 }, { value: 21, - expectedActuation: null + expectedActuation: null, + delay: 1000 }, { value: 22, - expectedActuation: null + expectedActuation: null, + delay: 2000 }, { value: 23, - expectedActuation: null + expectedActuation: null, + delay: 4000 }, { value: 24, - expectedActuation: null + expectedActuation: null, + delay: 8000 }, { value: 25, - expectedActuation: null - }, - { - value: 26, - expectedActuation: null - }, - { - value: 27, - expectedActuation: null - }, - { - value: 28, - expectedActuation: null - }, - { - value: 29, - expectedActuation: null - }, - { - value: 30, - expectedActuation: null - }, - { - value: 31, - expectedActuation: null - }, - { - value: 32, - expectedActuation: null - }, - { - value: 33, - expectedActuation: null - }, - { - value: 34, - expectedActuation: null - }, - { - value: 35, - expectedActuation: null - }, - { - value: 36, - expectedActuation: null - }, - { - value: 37, - expectedActuation: null - }, - { - value: 38, - expectedActuation: null - }, - { - value: 39, - expectedActuation: null - }, - { - value: 40, - expectedActuation: null - }, - { - value: 41, - expectedActuation: null - }, - { - value: 42, - expectedActuation: null - }, - { - value: 43, - expectedActuation: null - }, - { - value: 44, - expectedActuation: null - }, - { - value: 45, - expectedActuation: null - }, - { - value: 46, - expectedActuation: null - }, - { - value: 47, - expectedActuation: null - }, - { - value: 48, - expectedActuation: null + expectedActuation: null, + delay: 8000 }, { value: 49, - expectedActuation: 1 + expectedActuation: 1, + delay: 10000 }, { value: 50, - expectedActuation: 1 + expectedActuation: 1, + delay: 1000 }, { value: 51, - expectedActuation: 1 - }, - { - value: 19, - expectedActuation: null + expectedActuation: 1, + delay: 1000 }, { value: 19, - expectedActuation: null + expectedActuation: null, + delay: 1000 }, { value: 19, - expectedActuation: null + expectedActuation: null, + delay: 2000 }, { value: 19, - expectedActuation: null + expectedActuation: null, + delay: 4000 }, { value: 19, - expectedActuation: null - }, - { - value: 19, - expectedActuation: null - }, - { - value: 19, - expectedActuation: null - }, - { - value: 19, - expectedActuation: null - }, - { - value: 19, - expectedActuation: null - }, - { - value: 19, - expectedActuation: null - }, - { - value: 19, - expectedActuation: 0 + expectedActuation: 0, + delay: 10000 }, { value: 20, - expectedActuation: null + expectedActuation: null, + delay: 1000 }, { - value: 20, - expectedActuation: null - }, + value: 20, + expectedActuation: null, + delay: 1000 + } ]; @@ -270,7 +165,7 @@ var test = 
function(userToken, accountId, deviceId, deviceToken, cbManager) { assert.notEqual(deviceToken, null, "Device Token not defined"); assert.notEqual(deviceId, null, "DeviceId not defined"); - promtests.checkObservations(temperatureValues, rules[switchOnCmdName].cid, cbManager, deviceToken, accountId, deviceId, componentParamName, 1000) + promtests.checkObservations(temperatureValues, rules[switchOnCmdName].cid, cbManager, deviceToken, accountId, deviceId, componentParamName) .then(() => {done();}) .catch((err) => { done(err);}); }, From 383149f4372db4ada47030e5c51345a3e4925721 Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Mon, 1 Oct 2018 15:34:45 +0200 Subject: [PATCH 33/68] Adapted tests for k8s --- .gitignore | 1 + Makefile | 7 +++++- tests/Makefile | 24 ++++++++++++++++--- tests/oisp-tests.js | 6 ++--- tests/test-config-docker.json | 39 +++++++++++++++++++++++++++++++ tests/test-config-kubernetes.json | 39 +++++++++++++++++++++++++++++++ 6 files changed, 109 insertions(+), 7 deletions(-) create mode 100644 tests/test-config-docker.json create mode 100644 tests/test-config-kubernetes.json diff --git a/.gitignore b/.gitignore index 787397fe..4ead6ee5 100644 --- a/.gitignore +++ b/.gitignore @@ -6,4 +6,5 @@ config-backup/ .firstRun tests/.env tests/package-lock.json +tests/test-config.json data-backup/ \ No newline at end of file diff --git a/Makefile b/Makefile index 3bd5a08a..fa061200 100644 --- a/Makefile +++ b/Makefile @@ -37,6 +37,9 @@ ifeq ($(DEBUG),true) CONTAINERS:=$(CONTAINERS) debugger endif +# Can be docker or kubernetes +export TESTING_PLATFORM?=docker + .init: @$(call msg,"Initializing ..."); @if [ ! -f ".firstRun" ]; then \ @@ -136,9 +139,11 @@ start: .init start-test: export TEST := "1" start-test: - @$(call msg,"Starting OISP (test mode)"); + @$(call msg,"Starting OISP (test mode: $(TESTING_PLATFORM) )"); @make -C tests email-account $(shell pwd)/tests/.env +ifeq ($(TESTING_PLATFORM),docker) @source ./tests/.env && ./docker.sh up -d +endif ## stop: Stop running OISP containers. ## $CONTAINERS arg (as whitespace seperated list) specifies which containers should be stopped, diff --git a/tests/Makefile b/tests/Makefile index fd3073d6..7e4fb1bd 100644 --- a/tests/Makefile +++ b/tests/Makefile @@ -26,6 +26,14 @@ USERNAME = "oisp@testing.com" PASSWORD = "OispTesting1" ROLE = "admin" +ifeq ($(TESTING_PLATFORM), docker) +KAFKA_BROKER="localhost:9092" +else +KAFKA_BROKER="kafka:9092" +NAMESPACE=oisp +DASHBOARD_POD:=$(shell kubectl -n oisp get pods -o custom-columns=:metadata.name | grep dashboard | head -n 1) +endif + #---------------------------------------------------------------------------------------------------------------------- # targets #---------------------------------------------------------------------------------------------------------------------- @@ -34,20 +42,30 @@ npm-install: @$(call msg,"Installing npm packages ..."); @cd $(CURRENT_DIR) && npm install > /dev/null + test: npm-install + @cp test-config-$(TESTING_PLATFORM).json test-config.json @$(call msg,"Waiting for frontend and test Kafka readiness"); # fill in separator to avoid accidentially dashboard being most recent heartbeat. # Then look always for latest heartbeat - @until echo new-test | kafkacat -P -b localhost:9092 -t heartbeat ; do echo "Kafka not ready. Waiting ..."; sleep 1; done - @ (for i in {1..4}; do echo new-test ; done) | kafkacat -P -b localhost:9092 -t heartbeat; - @. 
../setup-environment.sh && until kafkacat -b localhost:9092 -t heartbeat -e -o -4 2> /dev/null | grep -m 1 "dashboard"; do sleep 1 ; done + @until echo new-test | kafkacat -P -b $(KAFKA_BROKER) -t heartbeat; do echo "Kafka not ready. Waiting ..."; sleep 1; done + @ (for i in {1..4}; do echo new-test ; done) | kafkacat -P -b $(KAFKA_BROKER) -t heartbeat; + @. ../setup-environment.sh && until kafkacat -C -b $(KAFKA_BROKER) -t heartbeat -e -o -4 2> /dev/null | grep -m 1 "dashboard"; do sleep 1 ; done +ifeq ($(TESTING_PLATFORM), docker) @$(call msg,"Resetting database ..."); @. ../setup-environment.sh && docker exec -i "$${COMPOSE_PROJECT_NAME}_frontend_1" node /app/admin resetDB @$(call msg,"Adding a user for testing ..."); @. ../setup-environment.sh && docker exec -i "$${COMPOSE_PROJECT_NAME}_frontend_1" node /app/admin addUser $(USERNAME) $(PASSWORD) $(ROLE) > /dev/null +else + @$(call msg,"Resetting database ..."); + @kubectl exec -i $(DASHBOARD_POD) -- node /app/admin resetDB + @$(call msg,"Adding a user for testing ..."); + @kubectl exec -i $(DASHBOARD_POD) -- node /app/admin addUser $(USERNAME) $(PASSWORD) $(ROLE) > /dev/null + +endif @$(call msg,"Starting the e2e testing ..."); @. ../setup-environment.sh && env http_proxy="" https_proxy="" USERNAME=$(USERNAME) PASSWORD=$(PASSWORD) LOG_LEVEL="error" NODE_ENV="local" npm test diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index d88850a3..71e4a175 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -195,8 +195,9 @@ describe("Waiting for OISP services to be ready ...\n".bold, function() { var kafkaConsumer; var kafka_credentials = cfenvReader.getServiceCredentials("kafka-ups"); var topic = kafka_credentials.topics.heartbeat.name; - var partition = 0; - var kafkaClient = new kafka.KafkaClient({kafkaHost: "localhost:9092"}); + var partition = 0; + var kafkaAddress = config["connector"]["kafka"]["host"] + ":" + config["connector"]["kafka"]["port"]; + var kafkaClient = new kafka.KafkaClient({kafkaHost: kafkaAddress}); var kafkaOffset = new kafka.Offset(kafkaClient); var getKafkaOffset = function(topic_, partition_, cb) { @@ -1404,4 +1405,3 @@ describe("change password and delete receiver ... 
\n".bold, function(){ }) }) - diff --git a/tests/test-config-docker.json b/tests/test-config-docker.json new file mode 100644 index 00000000..41c7e7a3 --- /dev/null +++ b/tests/test-config-docker.json @@ -0,0 +1,39 @@ +{ + "logger": { + "LEVEL": "info", + "PATH": "/tmp/", + "MAX_SIZE": 134217728 + }, + "default_connector": "rest+ws", + "connector": { + "rest": { + "host": "localhost", + "port": 80, + "protocol": "http", + "strictSSL": false, + "timeout": 30000, + "proxy": { + "host": false, + "port": false + } + }, + "ws": { + "host": "localhost", + "port": 5000, + "minRetryTime": 2500, + "maxRetryTime": 600000, + "testTimeout": 40000, + "pingPongIntervalMs": 30000, + "enablePingPong": true, + "secure": false, + "proxy": { + "host": false, + "port": false + } + }, + "kafka": { + "host": "localhost", + "port": 9092 + } + } +} diff --git a/tests/test-config-kubernetes.json b/tests/test-config-kubernetes.json new file mode 100644 index 00000000..5063d828 --- /dev/null +++ b/tests/test-config-kubernetes.json @@ -0,0 +1,39 @@ +{ + "logger": { + "LEVEL": "info", + "PATH": "/tmp/", + "MAX_SIZE": 134217728 + }, + "default_connector": "rest+ws", + "connector": { + "rest": { + "host": "dashboard", + "port": 4001, + "protocol": "http", + "strictSSL": false, + "timeout": 30000, + "proxy": { + "host": false, + "port": false + } + }, + "ws": { + "host": "websocket-server", + "port": 5000, + "minRetryTime": 2500, + "maxRetryTime": 600000, + "testTimeout": 40000, + "pingPongIntervalMs": 30000, + "enablePingPong": true, + "secure": false, + "proxy": { + "host": false, + "port": false + } + }, + "kafka": { + "host": "kafka", + "port": 9092 + } + } +} From 4ab5769112be56e852a0d598fc7d10a033c458f2 Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Tue, 9 Oct 2018 15:47:34 +0200 Subject: [PATCH 34/68] Updated debugger container image Upgrade to Ubuntu 18.04 and install tools needed for load testing with locust. 
--- docker-debugger/Dockerfile | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/docker-debugger/Dockerfile b/docker-debugger/Dockerfile index f0a9464d..aabaab1e 100644 --- a/docker-debugger/Dockerfile +++ b/docker-debugger/Dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:xenial +FROM ubuntu:18.04 RUN apt-get update && apt-get -y install nano make git kafkacat python python-pip python3-pip python3-dev python3-setuptools python-setuptools build-essential nodejs dnsutils virtualenv snapd npm wget apt-transport-https curl @@ -12,7 +12,8 @@ RUN apt-get install -y kubectl RUN npm install --global n RUN n 6 -RUN ln -s /usr/bin/nodejs /usr/bin/node + +RUN pip3 install locustio oisp ENV OISP_REMOTE https://github.com/Open-IoT-Service-Platform/platform-launcher.git @@ -20,4 +21,6 @@ RUN mkdir /home/platform-launcher WORKDIR /home/platform-launcher +EXPOSE 8089 5557 5558 + CMD ["tail", "-f", "/dev/null"] \ No newline at end of file From a3a15763c0fda11297c5c41a1407a70d63fcc2ba Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Sun, 28 Oct 2018 21:51:16 +0100 Subject: [PATCH 35/68] Updated Kafka version to 1.0.0 --- docker-kafka/Dockerfile | 7 +++--- docker-kafka/scripts/start-kafka.sh | 25 ++++++++++++++++---- docker-kafka/supervisor/kafka.conf | 5 ++-- docker-kafka/supervisor/supervisord.conf | 29 ++++++++++++++++++++++++ 4 files changed, 56 insertions(+), 10 deletions(-) create mode 100644 docker-kafka/supervisor/supervisord.conf diff --git a/docker-kafka/Dockerfile b/docker-kafka/Dockerfile index f34836f0..a90c52dd 100644 --- a/docker-kafka/Dockerfile +++ b/docker-kafka/Dockerfile @@ -12,7 +12,8 @@ FROM java:openjdk-8-jre ENV DEBIAN_FRONTEND noninteractive ENV SCALA_VERSION 2.11 -ENV KAFKA_VERSION 0.10.2.0 +#ENV KAFKA_VERSION 0.10.2.0 +ENV KAFKA_VERSION 1.0.0 ENV KAFKA_HOME /opt/kafka_"$SCALA_VERSION"-"$KAFKA_VERSION" # Install Kafka, Zookeeper and other needed things @@ -27,9 +28,9 @@ RUN apt-get update && \ ADD scripts/start-kafka.sh /usr/bin/start-kafka.sh # Supervisor config -ADD supervisor/kafka.conf supervisor/zookeeper.conf /etc/supervisor/conf.d/ +ADD supervisor/kafka.conf /etc/supervisor/conf.d/ # 2181 is zookeeper, 9092 is kafka -EXPOSE 2181 9092 +EXPOSE 9092 CMD ["supervisord", "-n"] diff --git a/docker-kafka/scripts/start-kafka.sh b/docker-kafka/scripts/start-kafka.sh index 168fc97e..023c087f 100755 --- a/docker-kafka/scripts/start-kafka.sh +++ b/docker-kafka/scripts/start-kafka.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/usr/bin/env bash # Optional ENV variables: # * ADVERTISED_HOST: the external ip for the container, e.g. `docker-machine ip \`docker-machine active\`` @@ -9,18 +9,21 @@ # * NUM_PARTITIONS: configure the default number of log partitions per topic # Configure advertised host/port if we run in helios + + if [ ! -z "$HELIOS_PORT_kafka" ]; then ADVERTISED_HOST=`echo $HELIOS_PORT_kafka | cut -d':' -f 1 | xargs -n 1 dig +short | tail -n 1` ADVERTISED_PORT=`echo $HELIOS_PORT_kafka | cut -d':' -f 2` fi + # Set the external host and port if [ ! -z "$ADVERTISED_HOST" ]; then echo "advertised host: $ADVERTISED_HOST" if grep -q "^advertised.host.name" $KAFKA_HOME/config/server.properties; then sed -r -i "s/#(advertised.host.name)=(.*)/\1=$ADVERTISED_HOST/g" $KAFKA_HOME/config/server.properties else - echo "advertised.host.name=$ADVERTISED_HOST" >> $KAFKA_HOME/config/server.properties + echo -e "\nadvertised.host.name=$ADVERTISED_HOST" >> $KAFKA_HOME/config/server.properties fi fi if [ ! -z "$ADVERTISED_PORT" ]; then @@ -46,7 +49,13 @@ if [ ! 
-z "$ZK_CHROOT" ]; then } # configure kafka - sed -r -i "s/(zookeeper.connect)=(.*)/\1=localhost:2181\/$ZK_CHROOT/g" $KAFKA_HOME/config/server.properties + sed -r -i "s/(zookeeper.connect)=(.*)/\1=locallhost:2181\/$ZK_CHROOT/g" $KAFKA_HOME/config/server.properties +fi + +# Configure external Zookeeper +if [ ! -z "$ZK_CONNECT" ]; then + echo "Setting external Zookeeper service" + sed -r -i "s/(zookeeper.connect)=(.*)/\1=$ZK_CONNECT/g" $KAFKA_HOME/config/server.properties fi # Allow specification of log retention policies @@ -68,8 +77,14 @@ fi # Enable/disable auto creation of topics if [ ! -z "$AUTO_CREATE_TOPICS" ]; then echo "auto.create.topics.enable: $AUTO_CREATE_TOPICS" - echo "auto.create.topics.enable=$AUTO_CREATE_TOPICS" >> $KAFKA_HOME/config/server.properties + if grep auto.create.topics.enable $KAFKA_HOME/config/server.properties; then + sed -r -i "s/(auto.create.topics.enable)=(.*)/\1=${AUTO_CREATE_TOPICS}/g" $KAFKA_HOME/config/server.properties + else + echo "auto.create.topics.enable=$AUTO_CREATE_TOPICS" >> $KAFKA_HOME/config/server.properties + fi fi -# Run Kafka +# Run kafka-server $KAFKA_HOME/bin/kafka-server-start.sh $KAFKA_HOME/config/server.properties + + diff --git a/docker-kafka/supervisor/kafka.conf b/docker-kafka/supervisor/kafka.conf index de058323..62e2896b 100644 --- a/docker-kafka/supervisor/kafka.conf +++ b/docker-kafka/supervisor/kafka.conf @@ -1,4 +1,5 @@ [program:kafka] -command=/usr/bin/start-kafka.sh +command=bash /usr/bin/start-kafka.sh autostart=true -autorestart=true \ No newline at end of file +autorestart=true +stopsignal=TERM diff --git a/docker-kafka/supervisor/supervisord.conf b/docker-kafka/supervisor/supervisord.conf new file mode 100644 index 00000000..78f44461 --- /dev/null +++ b/docker-kafka/supervisor/supervisord.conf @@ -0,0 +1,29 @@ +; supervisor config file + +[unix_http_server] +file=/var/run/supervisor.sock ; (the path to the socket file) +chmod=0700 ; sockef file mode (default 0700) + +[supervisord] +logfile=/var/log/supervisor/supervisord.log ; (main log file;default $CWD/supervisord.log) +pidfile=/var/run/supervisord.pid ; (supervisord pidfile;default supervisord.pid) +childlogdir=/var/log/supervisor ; ('AUTO' child log dir, default $TEMP) +nodaemon=true + +; the below section must remain in the config file for RPC +; (supervisorctl/web interface) to work, additional interfaces may be +; added by defining them in separate rpcinterface: sections +[rpcinterface:supervisor] +supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface + +[supervisorctl] +serverurl=unix:///var/run/supervisor.sock ; use a unix:// URL for a unix socket + +; The [include] section can just contain the "files" setting. This +; setting can list multiple files (separated by whitespace or +; newlines). It can also contain wildcards. The filenames are +; interpreted as relative to this file. Included files *cannot* +; include files themselves. 
+ +[include] +files = /etc/supervisor/conf.d/*.conf From 08dc9e6dc3552e2c242b0596f57980e4211d3331 Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Sun, 28 Oct 2018 21:53:10 +0100 Subject: [PATCH 36/68] Added standalone Zookeeper, HDFS, Hbase-regionserver --- Makefile | 17 ++ docker-compose.yml | 86 +++++++-- docker-hbase/Dockerfile | 14 ++ docker-hbase/config/hbase-site.xml | 16 +- docker-hbase/config/supervisor.conf | 11 -- docker-hbase/config/supervisor.region.conf | 31 +++ .../scripts/entrypoint-region-cleanup.sh | 30 +++ docker-hbase/scripts/entrypoint-region.sh | 24 +++ docker-hbase/scripts/entrypoint.sh | 24 ++- docker-hbase/scripts/replace-hostname.sh | 6 +- docker-hbase/scripts/wait-for-it.sh | 177 ++++++++++++++++++ setup-environment.example.sh | 5 +- 12 files changed, 402 insertions(+), 39 deletions(-) create mode 100644 docker-hbase/config/supervisor.region.conf create mode 100755 docker-hbase/scripts/entrypoint-region-cleanup.sh create mode 100755 docker-hbase/scripts/entrypoint-region.sh create mode 100755 docker-hbase/scripts/wait-for-it.sh diff --git a/Makefile b/Makefile index fa061200..1c338fa7 100644 --- a/Makefile +++ b/Makefile @@ -88,6 +88,23 @@ endif sudo mv data-backup data; \ fi; \ fi + @if [ -f data/hdfs/name ]; then echo "HDFS folder existing already"; else \ + echo "Creating HDFS name and data node"; \ + mkdir -p data/hdfs/name; \ + mkdir -p data/hdfs/data; \ + fi; + @if [ -f data/zookeeper ]; then echo "Zookeeper folder existing already"; else \ + echo "Creating Zookeeper data folder"; \ + mkdir -p data/zookeeper; \ + mkdir -p data/zookeeper-logs; \ + fi; + + @if [ -f data/keys/hbase/id_rsa ]; then echo "HBase keys existing already"; else \ + mkdir -p data/keys/hbase; \ + ssh-keygen -q -t rsa -P "" -f data/keys/hbase/id_rsa; \ + fi; + @cp data/keys/hbase/* docker-hbase + @chmod 755 docker-hbase/id_rsa* @if [ -f data/keys/private.pem ]; then echo "RSA keys existing already"; else \ mkdir -p data/keys; \ diff --git a/docker-compose.yml b/docker-compose.yml index 481e45ca..443f8818 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -16,20 +16,50 @@ version: '3.7' services: - hbase: + hbase-master: image: oisp/hbase:${DOCKER_TAG} + entrypoint: bash /opt/wait-for-it.sh hdfs-namenode:8020 -t 300 -- bash /opt/wait-for-it.sh zookeeper:2181 -t 300 -- /opt/entrypoint.sh build: context: ./docker-hbase/ labels: - oisp=true - oisp.git_commit=${GIT_COMMIT_PLATFORM_LAUNCHER} ports: - - 2181:2181 - 9090:9090 + - 8081:8080 + - 9095:9095 + - 16010:16010 - 60000:60000 - - 60020:60020 volumes: - ./data/hbase-logs:/opt/hbase/logs + depends_on: + - zookeeper + - hdfs-namenode + - hdfs-datanode + - hbase-region + environment: + - REGIONSERVERS=hbase-region + - ZOOKEEPER=${ZOOKEEPER_HBASE} + - HDFS_NAMENODE=hdfs-namenode + hbase-region: + image: oisp/hbase:${DOCKER_TAG} + entrypoint: /opt/entrypoint-region.sh + build: + context: ./docker-hbase/ + dockerfile: Dockerfile + labels: + - oisp=true + - oisp.git_commit=${GIT_COMMIT_PLATFORM_LAUNCHER} + volumes: + - ./data/hbase-logs:/opt/hbase/logs + depends_on: + - zookeeper + - hdfs-namenode + - hdfs-datanode + environment: + - HBASE_MASTER=hbase-master + - ZOOKEEPER=${ZOOKEEPER_HBASE} + - HDFS_NAMENODE=hdfs-namenode postgres: image: oisp/postgres:${DOCKER_TAG} build: @@ -55,15 +85,13 @@ services: - oisp.git_commit=${GIT_COMMIT_PLATFORM_LAUNCHER} ports: - 9092:9092 - environment: - - ZK_CONNECT=hbase:2181 depends_on: - - hbase + - zookeeper environment: - ADVERTISED_HOST=${HOST_IP_ADDRESS} - - ADVERTISED_PORT=9092 + - 
ADVERTISED_PORT=${KAFKA_PORT} - AUTO_CREATE_TOPICS=true - + - ZK_CONNECT=${ZOOKEEPER_KAFKA}:${ZOOKEEPER_KAFKA_PORT} redis: image: redis:3.0 volumes: @@ -80,7 +108,7 @@ services: - oisp.git_commit=${GIT_COMMIT_GEARPUMP} depends_on: - nginx - - hbase + - hbase-master - kafka - frontend working_dir: /app @@ -165,10 +193,46 @@ services: ports: - 8080:8080 depends_on: - - hbase + - hbase-master - kafka - command: ./wait-for-it.sh hbase:2181 -t 300 -- ./wait-for-it.sh kafka:9092 -t 300 -- make runjar + command: ./wait-for-it.sh hbase-master:9090 -t 300 -- ./wait-for-it.sh kafka:9092 -t 300 -- make runjar working_dir: /app environment: - VCAP_SERVICES=${VCAP_SERVICES} - VCAP_APPLICATION=${VCAP_APPLICATION} + hdfs-namenode: + image: singularities/hadoop + command: start-hadoop namenode + volumes: + - ./data/hdfs/name:/opt/hdfs + hostname: hdfs-namenode + environment: + HDFS_USER: root + ports: + - "8020:8020" + - "14000:14000" + - "50070:50070" + - "50075:50075" + - "10020:10020" + - "13562:13562" + - "19888:19888" + hdfs-datanode: + image: singularities/hadoop + command: start-hadoop datanode hdfs-namenode + volumes: + - ./data/hdfs/data:/opt/hdfs + environment: + HDFS_USER: root + links: + - hdfs-namenode + zookeeper: + image: zookeeper:3.4.13 + hostname: zookeeper + volumes: + - ./data/zookeeper:/data + - ./data/zookeeper-logs:/datalog + ports: + - 2181:2181 + environment: + ZOO_MY_ID: 1 + ZOO_SERVERS: server.1=0.0.0.0:2888:3888 \ No newline at end of file diff --git a/docker-hbase/Dockerfile b/docker-hbase/Dockerfile index 43013c76..efc4b6b3 100644 --- a/docker-hbase/Dockerfile +++ b/docker-hbase/Dockerfile @@ -34,6 +34,7 @@ RUN apt-get update && apt-get upgrade -y && \ sed -i "s,^. export JAVA_HOME.*,export JAVA_HOME=$JAVA_HOME," conf/hbase-env.sh && \ echo "export JAVA_HOME=$JAVA_HOME" > /etc/profile.d/defaults.sh && \ apt-get install -y --no-install-recommends openjdk-8-jre-headless && \ + apt-get install -y openssh-server openssh-client && \ apt-get clean && rm -rf /var/cache/apt/archives/* /var/lib/apt/lists/* @@ -46,9 +47,22 @@ ADD config/zoo.cfg /opt/hbase/conf/ ADD scripts/replace-hostname.sh /opt/ ADD scripts/entrypoint.sh /opt/ +ADD scripts/entrypoint-region.sh /opt/ ADD config/supervisor.conf /etc/supervisor/conf.d/ +RUN mkdir -p /root/.ssh +RUN chmod 700 /root/.ssh +COPY id_rsa.pub /root/.ssh/id_rsa.pub +ADD id_rsa /root/.ssh/id_rsa +RUN ls -la /root/.ssh +RUN cat /root/.ssh/id_rsa.pub > /root/.ssh/authorized_keys +RUN chmod 600 /root/.ssh/authorized_keys +RUN chmod 600 /root/.ssh/id_rsa + +RUN sed -i 's/# StrictHostKeyChecking ask/ StrictHostKeyChecking no/' /etc/ssh/ssh_config + +ADD scripts/wait-for-it.sh /opt/ # REST API EXPOSE 8080 # REST Web UI at :8085/rest.jsp diff --git a/docker-hbase/config/hbase-site.xml b/docker-hbase/config/hbase-site.xml index eac48dd5..6071aee0 100644 --- a/docker-hbase/config/hbase-site.xml +++ b/docker-hbase/config/hbase-site.xml @@ -3,21 +3,15 @@ hbase.zookeeper.quorum - hbase-hostname + zookeeper-hostname hbase.rootdir - file:////data/hbase + hdfs://hdfs-namenode-hostname:8020/hbase - - - hbase.master.info.bindAddress - hbase-hostname - - - - hbase.regionserver.info.bindAddress - hbase-hostname + + hbase.cluster.distributed + true diff --git a/docker-hbase/config/supervisor.conf b/docker-hbase/config/supervisor.conf index c7f7160a..9ef1391d 100644 --- a/docker-hbase/config/supervisor.conf +++ b/docker-hbase/config/supervisor.conf @@ -13,14 +13,3 @@ redirect_stderr=true autostart=true autorestart = true priority=20 - -[program:master] 
-command=/bin/sh -c "/opt/hbase/bin/hbase master start" -stdout_logfile=/dev/fd/1 -stdout_logfile_maxbytes=0 -redirect_stderr=true -startsecs = 10 -startretries = 10 -autostart=true -autorestart = true -priority=30 diff --git a/docker-hbase/config/supervisor.region.conf b/docker-hbase/config/supervisor.region.conf new file mode 100644 index 00000000..dde28945 --- /dev/null +++ b/docker-hbase/config/supervisor.region.conf @@ -0,0 +1,31 @@ +[program:thrift] +command=/bin/sh -c "/opt/hbase/bin/hbase thrift start" +stdout_logfile = /data/logs/hbase-region-thrift.log +redirect_stderr=true +autostart=true +autorestart = true +stopsecs = 10 +priority=10 + +[program:rest] +command=/bin/sh -c "/opt/hbase/bin/hbase rest start" +stdout_logfile = /data/logs/hbase-region-rest.log +redirect_stderr=true +autostart=true +autorestart = true +stopsecs = 10 +priority=20 + + +[program:shutdown] +command=/opt/entrypoint-region-cleanup.sh +stdout_logfile=/data/logs/hbase-region-shutdown.log +stdout_logfile_maxbytes=0 +redirect_stderr=true +startsecs = 10 +stopsecs = 10 +startretries = 1 +autostart=true +autorestart = false +priority=30 + diff --git a/docker-hbase/scripts/entrypoint-region-cleanup.sh b/docker-hbase/scripts/entrypoint-region-cleanup.sh new file mode 100755 index 00000000..87762d24 --- /dev/null +++ b/docker-hbase/scripts/entrypoint-region-cleanup.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +# +# Copyright (c) 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License + +echo "Stop listener started" + +function clean_up { + echo "Stop listener called" + /opt/hbase/bin/hbase-daemon.sh stop regionserver + exit 0; + } +trap clean_up SIGTERM + +while true; do sleep 10; done + + + diff --git a/docker-hbase/scripts/entrypoint-region.sh b/docker-hbase/scripts/entrypoint-region.sh new file mode 100755 index 00000000..5ba0f610 --- /dev/null +++ b/docker-hbase/scripts/entrypoint-region.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +# +# Copyright (c) 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License + +/opt/replace-hostname.sh + +mkdir -p /data/logs + +service ssh start +exec supervisord -n + diff --git a/docker-hbase/scripts/entrypoint.sh b/docker-hbase/scripts/entrypoint.sh index 217d11a3..95369fcb 100755 --- a/docker-hbase/scripts/entrypoint.sh +++ b/docker-hbase/scripts/entrypoint.sh @@ -19,4 +19,26 @@ mkdir -p /data/logs -supervisord -n +service ssh start + +RS_CONF=/opt/hbase/conf/regionservers + +if [ ! 
-f /etc/hosts.bak ]; then + cp /etc/hosts /etc/hosts.bak +else + cp /etc/hosts.bak /etc/hosts +fi + +rm $RS_CONF +for rs in $REGIONSERVERS; do + RS_HOSTNAME=$(ssh root@${rs} '(cat /etc/hostname)') + RS_HOSTS=$(ssh root@${rs} "(grep ${RS_HOSTNAME} /etc/hosts)") + echo $RS_HOSTNAME >> $RS_CONF + if ! grep ${RS_HOSTNAME} /etc/hosts; then + echo $RS_HOSTS >> /etc/hosts + fi +done + +/opt/hbase/bin/start-hbase.sh + +exec supervisord -n diff --git a/docker-hbase/scripts/replace-hostname.sh b/docker-hbase/scripts/replace-hostname.sh index 107162e3..dce8c281 100755 --- a/docker-hbase/scripts/replace-hostname.sh +++ b/docker-hbase/scripts/replace-hostname.sh @@ -23,7 +23,6 @@ declare -a filesToUpdate=( '/opt/hbase/conf/hbase-site.xml' - '/opt/hbase/conf/zoo.cfg' ) for file in "${filesToUpdate[@]}"; do @@ -31,8 +30,9 @@ for file in "${filesToUpdate[@]}"; do if [ ! -f "${file}.bak" ]; then cp "${file}" "${file}.back" fi - - sed "s/hbase-hostname/${HOSTNAME}/g" "${file}.back" > "${file}" + cp "${file}.back" "${file}" + sed -i "s/zookeeper-hostname/${ZOOKEEPER}/g" "${file}" + sed -i "s/hdfs-namenode-hostname/${HDFS_NAMENODE}/g" "${file}" done diff --git a/docker-hbase/scripts/wait-for-it.sh b/docker-hbase/scripts/wait-for-it.sh new file mode 100755 index 00000000..401a6f17 --- /dev/null +++ b/docker-hbase/scripts/wait-for-it.sh @@ -0,0 +1,177 @@ +#!/usr/bin/env bash +# Use this script to test if a given TCP host/port are available + +cmdname=$(basename $0) + +echoerr() { if [[ $QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } + +usage() +{ + cat << USAGE >&2 +Usage: + $cmdname host:port [-s] [-t timeout] [-- command args] + -h HOST | --host=HOST Host or IP under test + -p PORT | --port=PORT TCP port under test + Alternatively, you specify the host and port as host:port + -s | --strict Only execute subcommand if the test succeeds + -q | --quiet Don't output any status messages + -t TIMEOUT | --timeout=TIMEOUT + Timeout in seconds, zero for no timeout + -- COMMAND ARGS Execute command with args after the test finishes +USAGE + exit 1 +} + +wait_for() +{ + if [[ $TIMEOUT -gt 0 ]]; then + echoerr "$cmdname: waiting $TIMEOUT seconds for $HOST:$PORT" + else + echoerr "$cmdname: waiting for $HOST:$PORT without a timeout" + fi + start_ts=$(date +%s) + while : + do + if [[ $ISBUSY -eq 1 ]]; then + nc -z $HOST $PORT + result=$? + else + (echo > /dev/tcp/$HOST/$PORT) >/dev/null 2>&1 + result=$? + fi + if [[ $result -eq 0 ]]; then + end_ts=$(date +%s) + echoerr "$cmdname: $HOST:$PORT is available after $((end_ts - start_ts)) seconds" + break + fi + sleep 1 + done + return $result +} + +wait_for_wrapper() +{ + # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 + if [[ $QUIET -eq 1 ]]; then + timeout $BUSYTIMEFLAG $TIMEOUT $0 --quiet --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & + else + timeout $BUSYTIMEFLAG $TIMEOUT $0 --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & + fi + PID=$! + trap "kill -INT -$PID" INT + wait $PID + RESULT=$? 
+ if [[ $RESULT -ne 0 ]]; then + echoerr "$cmdname: timeout occurred after waiting $TIMEOUT seconds for $HOST:$PORT" + fi + return $RESULT +} + +# process arguments +while [[ $# -gt 0 ]] +do + case "$1" in + *:* ) + hostport=(${1//:/ }) + HOST=${hostport[0]} + PORT=${hostport[1]} + shift 1 + ;; + --child) + CHILD=1 + shift 1 + ;; + -q | --quiet) + QUIET=1 + shift 1 + ;; + -s | --strict) + STRICT=1 + shift 1 + ;; + -h) + HOST="$2" + if [[ $HOST == "" ]]; then break; fi + shift 2 + ;; + --host=*) + HOST="${1#*=}" + shift 1 + ;; + -p) + PORT="$2" + if [[ $PORT == "" ]]; then break; fi + shift 2 + ;; + --port=*) + PORT="${1#*=}" + shift 1 + ;; + -t) + TIMEOUT="$2" + if [[ $TIMEOUT == "" ]]; then break; fi + shift 2 + ;; + --timeout=*) + TIMEOUT="${1#*=}" + shift 1 + ;; + --) + shift + CLI="$@" + break + ;; + --help) + usage + ;; + *) + echoerr "Unknown argument: $1" + usage + ;; + esac +done + +if [[ "$HOST" == "" || "$PORT" == "" ]]; then + echoerr "Error: you need to provide a host and port to test." + usage +fi + +TIMEOUT=${TIMEOUT:-15} +STRICT=${STRICT:-0} +CHILD=${CHILD:-0} +QUIET=${QUIET:-0} + +# check to see if timeout is from busybox? +# check to see if timeout is from busybox? +TIMEOUT_PATH=$(realpath $(which timeout)) +if [[ $TIMEOUT_PATH =~ "busybox" ]]; then + ISBUSY=1 + BUSYTIMEFLAG="-t" +else + ISBUSY=0 + BUSYTIMEFLAG="" +fi + +if [[ $CHILD -gt 0 ]]; then + wait_for + RESULT=$? + exit $RESULT +else + if [[ $TIMEOUT -gt 0 ]]; then + wait_for_wrapper + RESULT=$? + else + wait_for + RESULT=$? + fi +fi + +if [[ $CLI != "" ]]; then + if [[ $RESULT -ne 0 && $STRICT -eq 1 ]]; then + echoerr "$cmdname: strict mode, refusing to execute subprocess" + exit $RESULT + fi + exec $CLI +else + exit $RESULT +fi diff --git a/setup-environment.example.sh b/setup-environment.example.sh index ae4bb3b4..686a93e4 100755 --- a/setup-environment.example.sh +++ b/setup-environment.example.sh @@ -16,9 +16,9 @@ export DOCKER_TAG=${DOCKER_TAG:-'latest'} -export ZOOKEEPER_KAFKA='kafka' +export ZOOKEEPER_KAFKA='zookeeper' export ZOOKEEPER_KAFKA_PORT='2181' -export ZOOKEEPER_HBASE='hbase' +export ZOOKEEPER_HBASE='zookeeper' export ZOOKEEPER_HBASE_PORT='2181' export POSTGRES='postgres' export POSTGRES_DB_REGULAR="iot" @@ -31,6 +31,7 @@ export POSTGRES_PORT='5432' export POSTGRES_USERNAME='postgres' export POSTGRES_PASSWORD='intel123' export KAFKA='kafka:9092' +export KAFKA_PORT='9092' export KAFKA_HEARTBEAT_TOPIC='heartbeat' export GEARPUMP='gearpump:8090' export BACKEND='backend:8080' From 9de42dd438f0b5fd11a09a440460084c5baa83d9 Mon Sep 17 00:00:00 2001 From: ogz Date: Tue, 6 Nov 2018 19:42:32 +0300 Subject: [PATCH 37/68] Add option DISABLE_RATE_LIMITS to Makefile for frontend --- Makefile | 8 ++++++++ docker-compose.yml | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 1c338fa7..3d888e28 100644 --- a/Makefile +++ b/Makefile @@ -33,6 +33,14 @@ DOCKER_COMPOSE_ARGS?= PULL_IMAGES?=false DEBUG?=false +# Variable to remove rate limits for endpoints in frontend +DISABLE_RATE_LIMITS?=false +RATE_LIMIT = '' +ifeq ($(DISABLE_RATE_LIMITS),true) +RATE_LIMIT = '--disable-rate-limits' +endif +export RATE_LIMIT + ifeq ($(DEBUG),true) CONTAINERS:=$(CONTAINERS) debugger endif diff --git a/docker-compose.yml b/docker-compose.yml index 443f8818..6dfd4218 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -175,7 +175,7 @@ services: - websocket-server - backend - kafka - command: ./wait-for-it.sh postgres:5432 -t 300 -- ./wait-for-it.sh redis:6379 -t 300 -- ./wait-for-it.sh 
kafka:9092 -t 300 -- ./scripts/wait-for-heartbeat.sh backend websocket-server -- ./scripts/docker-start.sh + command: ./wait-for-it.sh postgres:5432 -t 300 -- ./wait-for-it.sh redis:6379 -t 300 -- ./wait-for-it.sh kafka:9092 -t 300 -- ./scripts/wait-for-heartbeat.sh backend websocket-server -- ./scripts/docker-start.sh ${RATE_LIMIT} volumes: - ./data/keys:/app/keys environment: From f21cca67e51424c52e97a212372ee60acc57f000 Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Sun, 11 Nov 2018 22:09:49 +0100 Subject: [PATCH 38/68] New OISP-Configuration independent of VCAP --- docker-compose.yml | 8 +++-- setup-environment.example.sh | 60 ++++++++++++++++++++++++++++++++++++ 2 files changed, 66 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 6dfd4218..23728e44 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -198,8 +198,12 @@ services: command: ./wait-for-it.sh hbase-master:9090 -t 300 -- ./wait-for-it.sh kafka:9092 -t 300 -- make runjar working_dir: /app environment: - - VCAP_SERVICES=${VCAP_SERVICES} - - VCAP_APPLICATION=${VCAP_APPLICATION} + - OISP_BACKEND_CONFIG=${OISP_BACKEND_CONFIG} + - OISP_KAFKA_CONFIG=${OISP_KAFKA_CONFIG} + - OISP_ZOOKEEPER_CONFIG=${OISP_ZOOKEEPER_CONFIG} + - OISP_KERBEROS_CONFIG=${OISP_KERBEROS_CONFIG} + - OISP_HBASE_CONFIG=${OISP_HBASE_CONFIG} + - OISP_HADOOP_PROPERTIES=${OISP_HADOOP_PROPERTIES} hdfs-namenode: image: singularities/hadoop command: start-hadoop namenode diff --git a/setup-environment.example.sh b/setup-environment.example.sh index 686a93e4..bfe4b3ac 100755 --- a/setup-environment.example.sh +++ b/setup-environment.example.sh @@ -216,3 +216,63 @@ export VCAP_SERVICES='{ export VCAP_APPLICATION='{ "space_name": "local" }' + + +#VCAP variables are left overs from Cloud Foundry times. +#To allow a transition to Kubernetes, we will define both, VCAP and regular environment variables. +#The following is how the configuration looks in future +#The root variable for every service looks like OISP_SERVICE_CONFIG +#It contains a JSON object, starting with single quote, hash names with double quotes, environmental variables with double, then single quotes, e.g. "'$VAR'" +#References to other environmental variables which can be parsed as OBJECTS are done with "@@OISP_*_CONFIG" +#References to other environmental variables which can be parsed as MAPS (e.g. Property Maps) are done with "%%OISP_*_PROPERTIES". MAPS contain only pairs. 
+ +export OISP_BACKEND_CONFIG=\ +'{ + "tsdbName": "hbase", + "kafkaConfig": "@@OISP_KAFKA_CONFIG", + "zookeeperConfig": "@@OISP_ZOOKEEPER_CONFIG", + "kerberosConfig": "@@OISP_KERBEROS_CONFIG", + "hbaseConfig": "@@OISP_HBASE_CONFIG" +}' + + +export OISP_KAFKA_CONFIG=\ +'{ + "uri": "'$KAFKA'", + "partitions": 1, + "replication": 1, + "timeoutMs": 10000, + "topicsObservations": "metrics", + "topicsRuleEngine": "rules-update", + "topicsHeartbeatName": "'$KAFKA_HEARTBEAT_TOPIC'", + "topicsHeartbeatInterval": 5000 +}' + +export OISP_ZOOKEEPER_CONFIG=\ +'{ + "zkCluster": "'${ZOOKEEPER_KAFKA}:${ZOOKEEPER_KAFKA_PORT}'", + "zkNode": "/tmp" +'} + +export OISP_KERBEROS_CONFIG=\ +'{ + "kdc": "localhost", + "kpassword": "pass", + "krealm": "realm", + "kuser": "user" +}' + +export OISP_HBASE_CONFIG=\ +'{ + "hadoopProperties": "%%OISP_HADOOP_PROPERTIES" +}' + +export OISP_HADOOP_PROPERTIES=\ +'{ + "hadoop.security.authentication": "simple", + "hadoop.security.authorization": "false", + "hbase.security.authentication": "simple", + "ha.zookeeper.quorum": "'$ZOOKEEPER_HBASE'", + "hbase.zookeeper.property.clientPort": "'$ZOOKEEPER_HBASE_PORT'", + "hbase.zookeeper.quorum": "'$ZOOKEEPER_HBASE'" +}' From 9c0376e988b58bada155acfd83b8b5b117a8ae5e Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Sun, 25 Nov 2018 01:32:07 +0100 Subject: [PATCH 39/68] Created Data Sending subtests and necessary API in helper library * Added subtests which send and receive data * Extended API for GPS and attribute handling * Extended API for specifying additional target filters * Extended searchDataAdvanced to request and receive attributes * Tests for sending aggretated metrics for several components in one request --- tests/lib/helpers/data.js | 103 +++++- tests/oisp-tests.js | 109 +++--- tests/subtests/data-sending-tests.js | 515 +++++++++++++++++++++++++++ tests/subtests/promise-wrap.js | 57 ++- 4 files changed, 723 insertions(+), 61 deletions(-) create mode 100644 tests/subtests/data-sending-tests.js diff --git a/tests/lib/helpers/data.js b/tests/lib/helpers/data.js index 2cc40ed6..4f35e4f0 100644 --- a/tests/lib/helpers/data.js +++ b/tests/lib/helpers/data.js @@ -50,7 +50,6 @@ function getObservation(ts, userToken, accountId, deviceId, cid, cb) { api.data.searchData(data, function(err, response) { var found = false; - if (err) { cb(err); } else { @@ -73,32 +72,49 @@ function getObservation(ts, userToken, accountId, deviceId, cid, cb) { }); } -function searchData(from, userToken, accountId, deviceId, cid, cb) { + +function searchData(from, to, userToken, accountId, deviceId, cid, queryMeasureLocation, targetFilter, cb) { if (!cb) { throw "Callback required"; } + var metrics = [{ "id": cid }]; + + if (Array.isArray(cid)) { + metrics = cid.map((element) => ({"id": element})) + } + + if (targetFilter == undefined) { + targetFilter = {} + } + if (targetFilter.deviceList == undefined) { + targetFilter.deviceList = [deviceId]; + } else { + if (targetFilter.deviceList.indexOf(deviceId) == -1) { + targetFilter.deviceList.push(deviceId); + } + } + var data = { userToken: userToken, accountId: accountId, body: { from: from, - targetFilter: { - deviceList: [deviceId] - }, - metrics: [{ - id: cid - }] + targetFilter: targetFilter, + metrics: metrics, + queryMeasureLocation: queryMeasureLocation } }; + if (to !== undefined && to > 0) { + data.body.to = to; + } + api.data.searchData(data, function(err, response) { if (err) { cb(err) } else { - if (response.series) { - cb(null, response.series[0].points) - } + cb(null, response) } }); } @@ 
-111,7 +127,7 @@ function submitData(value, deviceToken, accountId, deviceId, cid, cb) { var ts = new Date().getTime(); var data = { - deviceToken: deviceToken, + userToken: deviceToken, deviceId: deviceId, body: { accountId: accountId, @@ -128,14 +144,55 @@ function submitData(value, deviceToken, accountId, deviceId, cid, cb) { if (err) { cb(err) } else { - if (response.series) { - cb(null, response.series[0].points) + if (response) { + cb(null, response) } } }); } -function searchDataAdvanced(from, userToken, accountId, deviceId, cid, cb) { +function submitDataList(valueList, deviceToken, accountId, deviceId, cidList, cb) { + if (!cb) { + throw "Callback required"; + } + var ts = new Date().getTime(); + + var data = { + userToken: deviceToken, + deviceId: deviceId, + body: { + accountId: accountId, + on: valueList[0].ts, + data: [] + } + } + + valueList.forEach(function(element){ + var toPush = { + componentId: cidList[element.component], + value: element.value.toString(), + on: element.ts + } + if (element.loc) { + toPush.loc = element.loc; + } + if (element.attributes !== undefined){ + toPush.attributes = element.attributes; + } + data.body.data.push(toPush); + }); + api.data.submitData(data, function(err, response) { + if (err) { + cb(err) + } else { + if (response) { + cb(null, response) + } + } + }); +} + +function searchDataAdvanced(from, to, userToken, accountId, deviceId, cidList, showMeasureLocation, returnedMeasureAttributes, aggregations, countOnly, cb) { if (!cb) { throw "Callback required"; } @@ -146,10 +203,21 @@ function searchDataAdvanced(from, userToken, accountId, deviceId, cid, cb) { body: { deviceIds: [deviceId], from: from, - componentIds: [cid] + showMeasureLocation: showMeasureLocation, + componentIds: cidList } }; + if (returnedMeasureAttributes !== undefined && returnedMeasureAttributes != []) { + data.body.returnedMeasureAttributes = returnedMeasureAttributes; + } + if (aggregations !== undefined && aggregations != "") { + data.body.aggregations = aggregations; + } + if (countOnly !== undefined) { + data.body.countOnly = countOnly; + } + api.data.searchDataAdvanced(data, function(err, response) { if (err) { cb(err) @@ -164,5 +232,6 @@ module.exports={ getObservation: getObservation, searchData: searchData, submitData: submitData, + submitDataList: submitDataList, searchDataAdvanced: searchDataAdvanced -} \ No newline at end of file +} diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index 71e4a175..5829c788 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -727,7 +727,8 @@ describe("Creating rules ... 
\n".bold, function() { }).timeout(20000); }); - + + describe("Sending observations and checking rules ...\n".bold, function() { it('Shall send observation and check rules', function(done) { @@ -839,51 +840,42 @@ describe("Sending observations and checking rules ...\n".bold, function() { }).catch( (err) => {done(err)}); }).timeout(30 * 1000); - it('Shall check observation', function(done) { - helpers.data.searchData(firstObservationTime, userToken, accountId, deviceId, componentId, function(err, data) { - if (err) { - done(new Error("Cannot get data: " + err)) - } - - if (data && data.length >= temperatureValues.length) { - for (var i = 0; i < data.length; i++) { - for (var j = 0; j < temperatureValues.length; j++) { - if (temperatureValues[j].ts == data[i].ts && temperatureValues[j].value == data[i].value) { - temperatureValues[j].ts = null; - } - } - } - - var err = ""; - for (var i = 0; i < temperatureValues.length; i++) { - if (temperatureValues[i].ts != null) { - err += "[" + i + "]=" + temperatureValues[i].value + " "; - } - } - if (err.length == 0) { - done(); - } else { - done(new Error("Got wrong data for " + err)) - } - } else { - done(new Error("Cannot get data")) - } - - }) - }).timeout(10000); - - it('Shall check observation in advance ways', function(done) { - helpers.data.searchDataAdvanced(firstObservationTime, userToken, accountId, deviceId, componentId, function(err, response) { + it('Shall check observation', function(done) { + helpers.data.searchData(firstObservationTime, -1, userToken, accountId, deviceId, componentId, false, {}, function(err, result) { if (err) { done(new Error("Cannot get data: " + err)) - }else { - assert.equal(response.data[0].deviceId, deviceId, 'advance search fail') - done() - } + } + + var data = {} + if (result && result.series && result.series.length == 1){ + data = result.series[0].points; + } + else { + done(new Error("Cannot get data.")); + } + if (data && data.length == temperatureValues.length) { + for (var j = 0; j < temperatureValues.length; j++) { + if (temperatureValues[j].ts == data[j].ts && temperatureValues[j].value == data[j].value) { + temperatureValues[j].ts = null; + } + } + } + + var err = ""; + for (var i = 0; i < temperatureValues.length; i++) { + if (temperatureValues[i].ts != null) { + err += "[" + i + "]=" + temperatureValues[i].value + " "; + } + } + if (err.length == 0) { + done(); + } else { + done(new Error("Got wrong data for " + err)) + } + + }) + }).timeout(10000); - }) - }).timeout(10000); - }); @@ -921,6 +913,36 @@ describe("Do statistics rule subtests ...".bold, }); +describe("Do data sending subtests ...".bold, + function() { + var test; + var descriptions = require("./subtests/data-sending-tests").descriptions; + it(descriptions.sendAggregatedDataPoints,function(done) { + test = require("./subtests/data-sending-tests").test(userToken, accountId, deviceId, deviceToken, cbManager); + test.sendAggregatedDataPoints(done); + }).timeout(10000); + it(descriptions.receiveAggregatedDataPoints,function(done) { + test.receiveAggregatedDataPoints(done); + }).timeout(10000); + it(descriptions.sendAggregatedMultipleDataPoints,function(done) { + test.sendAggregatedMultipleDataPoints(done); + }).timeout(10000); + it(descriptions.receiveAggregatedMultipleDataPoints,function(done) { + test.receiveAggregatedMultipleDataPoints(done); + }).timeout(10000); + it(descriptions.sendDataPointsWithLoc,function(done) { + test.sendDataPointsWithLoc(done); + }).timeout(10000); + it(descriptions.receiveDataPointsWithLoc,function(done) { + 
test.receiveDataPointsWithLoc(done); + }).timeout(10000); + it(descriptions.sendDataPointsWithAttributes,function(done) { + test.sendDataPointsWithAttributes(done); + }).timeout(10000); + it(descriptions.receiveDataPointsWithAttributes,function(done) { + test.receiveDataPointsWithAttributes(done); + }).timeout(10000); + }); describe("Geting and manage alerts ... \n".bold, function(){ @@ -1405,3 +1427,4 @@ describe("change password and delete receiver ... \n".bold, function(){ }) }) + diff --git a/tests/subtests/data-sending-tests.js b/tests/subtests/data-sending-tests.js new file mode 100644 index 00000000..04827191 --- /dev/null +++ b/tests/subtests/data-sending-tests.js @@ -0,0 +1,515 @@ +/** + * Copyright (c) 2018 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/*jshint esversion: 6 */ +/*jshint undef: true, unused: true */ +/* jshint node: true */ + +"use strict"; + +var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { + var chai = require('chai'); + var assert = chai.assert; + var helpers = require("../lib/helpers"); + var componentNames = ["temperature-sensor-sdt", "humidity-sensor-sdt"]; + var componentTypes = ["temperature.v1.0", "humidity.v1.0"]; + var promtests = require('./promise-wrap'); + var rules = []; + var componentId = []; + var dataValues1Time; + var dataValues2Time; + var dataValues3Time; + var dataValues4Time; + const MIN_NUMBER = 0.0001; + + var dataValues1 = [ + [{ + component: 0, + value: 10.1, + ts: 1 + }], + [{ + component: 0, + value: 11.2, + ts: 2 + }, { + component: 0, + value: 12.3, + ts: 3 + }], + [{ + component: 0, + value: 13.4, + ts: 4 + }, + { + component: 0, + value: 14.5, + ts: 5 + }, + { + component: 0, + value: 15.6, + ts: 6 + } + ], + [{ + component: 0, + value: 16.7, + ts: 7 + }, + { + component: 0, + value: 17.8, + ts: 8 + }, + { + component: 0, + value: 18.9, + ts: 9 + }, + { + component: 0, + value: 20.0, + ts: 10 + } + ], + [{ + component: 0, + value: 21.1, + ts: 11 + }, + { + component: 0, + value: 22.2, + ts: 12 + }, + { + component: 0, + value: 23.3, + ts: 13 + }, + { + component: 0, + value: 24.4, + ts: 14 + }, + { + component: 0, + value: 25.5, + ts: 15 + } + + ] + ]; + + + var dataValues2 = [ + [{ + component: 0, + value: 10.1, + ts: 1000 + }], + [{ + component: 1, + value: 10, + ts: 1020 + }, + { + component: 0, + value: 12.3, + ts: 1030 + } + ], + [{ + component: 0, + value: 13.4, + ts: 1040 + }, + { + component: 1, + value: 20, + ts: 1050 + }, + { + component: 0, + value: 15.6, + ts: 1060 + } + ], + [{ + component: 1, + value: 30, + ts: 1070 + }, + { + component: 0, + value: 17.8, + ts: 1070 + }, + { + component: 0, + value: 18.9, + ts: 1090 + }, + { + component: 1, + value: 40, + ts: 1100 + } + ], + [{ + component: 1, + value: 50, + ts: 1170 + }] + ] + + var dataValues3 = [ + [{ + component: 0, + value: 10, + ts: 10000, + loc: [99.12345, 12.3456, 456.789] + }], + [{ + component: 0, + value: 11, + ts: 20000, + loc: [9.8765, 432.1, 09.876] + }], + [{ + component: 0, + value: 12, + 
ts: 30000, + }], + [{ + component: 0, + value: 13, + ts: 40000, + loc: [0.0, 0.0, 0.0] + }], + [{ + component: 0, + value: 14, + ts: 50000, + loc: [200.345, 300.21] + }] + ]; + + var dataValues4 = [ + [{ + component: 1, + value: 99, + ts: 100000, + loc: [1.2444, 10.987, 456.789], + attributes: { + "key1": "value1" + } + }], + [{ + component: 1, + value: 98, + ts: 200000, + attributes: { + "key1": "value1", + "key2": "value2", + "key3": "value3" + } + }], + [{ + component: 1, + value: 97, + ts: 300000, + attributes: { + "key3": "value1", + "key4": "value2" + } + }], + [{ + component: 1, + value: 96, + ts: 400000, + loc: [0.0, 0.0, 0.0] + }], + [{ + component: 1, + value: 95, + ts: 500000, + loc: [200.345, 300.21], + attributes: { + "key5": "key1" + } + }] + ]; + + var createObjectFromData = function(sample, sampleHeader){ + var o = {}; + sample.forEach(function(element, index) { + if (element != "") { + var key = sampleHeader[index]; + if (key === "Value") { + key = "value"; + } else if (key === "Timestamp") { + key = "ts"; + } + o[key] = element; + } + }) + return o; + } + + var locEqual = function(dataValue, element) { + if (dataValue.loc == undefined) { + if ((element.lat == undefined || element.lat === "") && (element.lat == undefined || element.lon === "") && (element.alt == undefined || element.alt === "")) { + return true; + } else { + return false; + } + } + if ((dataValue.loc[0] == undefined || (dataValue.loc[0] - Number(element.lat)) <= MIN_NUMBER) + && (dataValue.loc[1] == undefined || (dataValue.loc[1].toString() - Number(element.lon)) <= MIN_NUMBER) + && (dataValue.loc[2] == undefined || (dataValue.loc[2].toString() - Number(element.alt)) <= MIN_NUMBER)) { + return true; + } else { + return false; + } + } + +var attrEqual = function(dataValue, element) { + var result = true; + if (dataValue.attributes !== undefined) { + Object.keys(dataValue.attributes).forEach(function(el) { + if (element[el] != dataValue.attributes[el]) { + result = false; + } + }) + } + return result; +} + + var comparePoints = function(dataValues, points) { + var result = true; + var reason = ""; + points.forEach(function(element, index) { + if ((element.ts != dataValues[index].ts) || + (element.value != dataValues[index].value) || + !locEqual(dataValues[index], element) || + !attrEqual(dataValues[index], element)) { + result = false; + reason = "Point " + JSON.stringify(element) + " does not fit to expected value " + + JSON.stringify(dataValues[index]); + } + }); + if (result === true) return true; + else return reason; + } + + var flattenArray = function(array) { + var results = array.map(function(element) { + return element; + }); + results = results.reduce(function(acc, cur) { + return acc.concat(cur) + }) + return results; + } + + + //********************* Main Object *****************// + //---------------------------------------------------// + return { + "sendAggregatedDataPoints": function(done) { + //To be independent of main tests, own sensors, actuators, and commands have to be created + promtests.addComponent(componentNames[0], componentTypes[0], deviceToken, accountId, deviceId) + .then((id) => { + componentId[0] = id; + }) + .then((id) => promtests.addComponent(componentNames[1], componentTypes[1], deviceToken, accountId, deviceId)) + .then((id) => { + componentId[1] = id; + }) + .then(() => { + var proms = []; + dataValues1Time = 0; + dataValues1.forEach(function(element) { + proms.push(promtests.submitDataList(element, deviceToken, accountId, deviceId, componentId)) + }); + return 
Promise.all(proms); + }) + .then(() => { + done(); + }) + .catch((err) => { + done(err); + }); + }, + "receiveAggregatedDataPoints": function(done) { + var listOfExpectedResults = flattenArray(dataValues1); + promtests.searchData(dataValues1Time, -1, deviceToken, accountId, deviceId, componentId[0], false, {}) + .then((result) => { + if (result.series.length != 1) done("Wrong number of point series!"); + var comparisonResult = comparePoints(listOfExpectedResults, result.series[0].points); + if (comparisonResult === true) { + done(); + } else { + done(comparisonResult); + } + }) + .catch((err) => { + done(err); + }); + + }, + "sendAggregatedMultipleDataPoints": function(done) { + var proms = []; + dataValues2Time = dataValues2[0][0].ts; + dataValues2.forEach(function(element) { + proms.push(promtests.submitDataList(element, deviceToken, accountId, deviceId, componentId, {})); + }); + Promise.all(proms) + .then(() => { + done() + }) + .catch((err) => { + done(err); + }); + }, + "receiveAggregatedMultipleDataPoints": function(done) { + + var flattenedDataValues = flattenArray(dataValues2); + var listOfExpectedResults = []; + listOfExpectedResults[0] = flattenedDataValues.filter( + (element) => (element.component == 0) + ); + listOfExpectedResults[1] = flattenedDataValues.filter( + (element) => (element.component == 1) + ); + promtests.searchData(dataValues2Time, -1, deviceToken, accountId, deviceId, componentId, false, {}) + .then((result) => { + if (result.series.length != 2) done("Wrong number of point series!"); + var mapping = [0, 1]; + if (result.series[0].componentId !== componentId[0]) { + mapping = [1, 0]; + } + + var comparisonResult = comparePoints(listOfExpectedResults[mapping[0]], result.series[0].points) + if (comparisonResult !== true) { + done(comparisonResult); + } + var listOfExpectedResults1 = flattenedDataValues.filter( + (element) => (element.component == 1) + ); + comparisonResult = comparePoints(listOfExpectedResults[mapping[1]], result.series[1].points); + if (comparisonResult !== true) { + done(comparisonResult); + } else { + done(); + } + }) + .catch((err) => { + done(err); + }); + }, + "sendDataPointsWithLoc": function(done) { + var proms = []; + dataValues3Time = dataValues3[0][0].ts; + dataValues3.forEach(function(element) { + proms.push(promtests.submitDataList(element, deviceToken, accountId, deviceId, componentId)); + }); + Promise.all(proms) + .then(() => { + done() + }) + .catch((err) => { + done(err); + }); + }, + "receiveDataPointsWithLoc": function(done) { + var flattenedDataValues = flattenArray(dataValues3); + promtests.searchData(dataValues3Time, -1, deviceToken, accountId, deviceId, componentId[0], true, {}) + .then((result) => { + if (result.series.length != 1) done("Wrong number of point series!"); + var comparisonResult = comparePoints(flattenedDataValues, result.series[0].points); + if (comparisonResult !== true) { + done(comparisonResult); + } else { + done(); + } + }) + .catch((err) => { + done(err); + }); + + }, + "sendDataPointsWithAttributes": function(done) { + var proms = []; + dataValues4Time = dataValues4[0][0].ts; + dataValues4.forEach(function(element) { + proms.push(promtests.submitDataList(element, deviceToken, accountId, deviceId, componentId)); + }); + Promise.all(proms) + .then(() => { + done() + }) + .catch((err) => { + done(err); + }); + }, + "receiveDataPointsWithAttributes": function(done) { + var flattenedDataValues = flattenArray(dataValues4); + promtests.searchDataAdvanced(dataValues4Time, -1, deviceToken, accountId, deviceId, 
componentId, true, undefined, undefined, undefined) + .then((result) => { + if (result.data[0].components.length != 2) done("Wrong number of point series!"); + var compIndex = 0; + if (result.data[0].components[1].componentId == componentId[1]) { + compIndex = 1; + } + var resultObjects = result.data[0].components[compIndex].samples.map( + (element) => + createObjectFromData(element, result.data[0].components[compIndex].samplesHeader) + ); + var comparisonResult = comparePoints(flattenedDataValues, resultObjects); + if (comparisonResult !== true) { + done(comparisonResult); + } else { + done(); + } + }) + .catch((err) => { + done(err); + }); + + }, + "cleanup": function(done) { } + }; +}; + +var descriptions = { + "sendAggregatedDataPoints": "Shall send multiple datapoints for one component", + "receiveAggregatedDataPoints": "Shall receive multiple datapoints for one component", + "sendAggregatedMultipleDataPoints": "Shall send multiple datapoints for 2 components", + "receiveAggregatedMultipleDataPoints": "Shall receive multiple datapoints for 2 components", + "sendDataPointsWithLoc": "Sending data points with location metadata", + "receiveDataPointsWithLoc": "Receiving data points with location metadata", + "sendDataPointsWithAttributes": "Sending data points with attributes", + "receiveDataPointsWithAttributes": "Receiving data points with attributes", + "cleanup": "Cleanup components, commands, rules created for subtest" +}; + +module.exports = { + test: test, + descriptions: descriptions +}; \ No newline at end of file diff --git a/tests/subtests/promise-wrap.js b/tests/subtests/promise-wrap.js index 630298cc..14489c4b 100644 --- a/tests/subtests/promise-wrap.js +++ b/tests/subtests/promise-wrap.js @@ -196,6 +196,57 @@ var deleteRule = function(userToken, accountId, ruleId){ }); }; +var submitDataList = function(valueList, deviceToken, accountId, deviceId, cidList){ + return new Promise((resolve, reject) => { + helpers.data.submitDataList(valueList, deviceToken, accountId, deviceId, cidList, function(err, response){ + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }); +} + + +var submitData = function(value, deviceToken, accountId, deviceId, cid){ + return new Promise((resolve, reject) => { + helpers.data.submitData(value, deviceToken, accountId, deviceId, cid, function(err, response){ + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }); +} + + +var searchData = function(from, to, userToken, accountId, deviceId, cid, queryMeasureLocation, targetFilter){ + return new Promise((resolve, reject) => { + helpers.data.searchData(from, to, userToken, accountId, deviceId, cid, queryMeasureLocation, targetFilter, function(err, response){ + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }); +} + + +var searchDataAdvanced = function(from, to, userToken, accountId, deviceId, cidList, showMeasureLocation, returnedMeasureAttributes, aggregations, countOnly){ + return new Promise((resolve, reject) => { + helpers.data.searchDataAdvanced(from, to, userToken, accountId, deviceId, cidList, showMeasureLocation, returnedMeasureAttributes, aggregations, countOnly, function(err, response){ + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }); +} + module.exports = { checkObservations: checkObservations, addComponent: addComponent, @@ -204,5 +255,9 @@ module.exports = { createStatisticRule: createStatisticRule, createTbRule: createTbRule, deleteComponent: deleteComponent, - deleteRule: deleteRule + deleteRule: 
deleteRule, + submitDataList: submitDataList, + submitData: submitData, + searchData: searchData, + searchDataAdvanced: searchDataAdvanced }; From 4999b7d5282b694352ce20cc83b3798a55e0e240 Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Mon, 3 Dec 2018 21:06:59 +0100 Subject: [PATCH 40/68] Added send-data subtest for *receiving selected attributes *rowcount only *aggregations *retrieve subset of data *send and retrieve max amount of data --- tests/lib/helpers/data.js | 3 + tests/oisp-tests.js | 20 ++- tests/subtests/data-sending-tests.js | 180 +++++++++++++++++++++++++-- 3 files changed, 194 insertions(+), 9 deletions(-) diff --git a/tests/lib/helpers/data.js b/tests/lib/helpers/data.js index 4f35e4f0..2d8fce84 100644 --- a/tests/lib/helpers/data.js +++ b/tests/lib/helpers/data.js @@ -217,6 +217,9 @@ function searchDataAdvanced(from, to, userToken, accountId, deviceId, cidList, s if (countOnly !== undefined) { data.body.countOnly = countOnly; } + if (to != undefined) { + data.body.to = to; + } api.data.searchDataAdvanced(data, function(err, response) { if (err) { diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index 5829c788..ac6559e5 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -728,7 +728,6 @@ describe("Creating rules ... \n".bold, function() { }).timeout(20000); }); - describe("Sending observations and checking rules ...\n".bold, function() { it('Shall send observation and check rules', function(done) { @@ -912,7 +911,6 @@ describe("Do statistics rule subtests ...".bold, }).timeout(10000); }); - describe("Do data sending subtests ...".bold, function() { var test; @@ -942,6 +940,24 @@ describe("Do data sending subtests ...".bold, it(descriptions.receiveDataPointsWithAttributes,function(done) { test.receiveDataPointsWithAttributes(done); }).timeout(10000); + it(descriptions.receiveDataPointsWithSelectedAttributes,function(done) { + test.receiveDataPointsWithSelectedAttributes(done); + }).timeout(10000); + it(descriptions.receiveDataPointsCount,function(done) { + test.receiveDataPointsCount(done); + }).timeout(10000); + it(descriptions.receiveAggregations,function(done) { + test.receiveAggregations(done); + }).timeout(10000); + it(descriptions.receiveSubset,function(done) { + test.receiveSubset(done); + }).timeout(10000); + it(descriptions.sendMaxAmountOfSamples,function(done) { + test.sendMaxAmountOfSamples(done); + }).timeout(10000); + it(descriptions.receiveMaxAmountOfSamples,function(done) { + test.receiveMaxAmountOfSamples(done); + }).timeout(10000); }); describe("Geting and manage alerts ... 
\n".bold, function(){ diff --git a/tests/subtests/data-sending-tests.js b/tests/subtests/data-sending-tests.js index 04827191..79b6f3c6 100644 --- a/tests/subtests/data-sending-tests.js +++ b/tests/subtests/data-sending-tests.js @@ -33,6 +33,7 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { var dataValues3Time; var dataValues4Time; const MIN_NUMBER = 0.0001; + const MAX_SAMPLES = 1000; var dataValues1 = [ [{ @@ -255,6 +256,14 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { }] ]; + var aggregation = { + MAX: 0, + MIN: 1, + COUNT: 2, + SUM: 3, + SUMOFSQUARES: 4 + } + var createObjectFromData = function(sample, sampleHeader){ var o = {}; sample.forEach(function(element, index) { @@ -271,7 +280,12 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { return o; } - var locEqual = function(dataValue, element) { + var locEqual = function(dataValue, element, onlyExistingAttr) { + if (onlyExistingAttr) { + if (element.lat === undefined && element.long === undefined) { + return true; + } + } if (dataValue.loc == undefined) { if ((element.lat == undefined || element.lat === "") && (element.lat == undefined || element.lon === "") && (element.alt == undefined || element.alt === "")) { return true; @@ -288,26 +302,31 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { } } -var attrEqual = function(dataValue, element) { +var attrEqual = function(dataValue, element, onlyExistingAttr) { var result = true; if (dataValue.attributes !== undefined) { Object.keys(dataValue.attributes).forEach(function(el) { - if (element[el] != dataValue.attributes[el]) { + if (!onlyExistingAttr && element[el] != dataValue.attributes[el]) { result = false; + } else { + if (element[el] !== undefined && element[el] != dataValue.attributes[el]){ + result = false; + } } }) } return result; } - var comparePoints = function(dataValues, points) { + var comparePoints = function(dataValues, points, onlyExistingAttributes) { var result = true; var reason = ""; + var onlyExistingAttr = onlyExistingAttributes == undefined ? 
false : onlyExistingAttributes; points.forEach(function(element, index) { if ((element.ts != dataValues[index].ts) || (element.value != dataValues[index].value) || - !locEqual(dataValues[index], element) || - !attrEqual(dataValues[index], element)) { + !locEqual(dataValues[index], element, onlyExistingAttr) || + !attrEqual(dataValues[index], element, onlyExistingAttr)) { result = false; reason = "Point " + JSON.stringify(element) + " does not fit to expected value " + JSON.stringify(dataValues[index]); @@ -327,6 +346,21 @@ var attrEqual = function(dataValue, element) { return results; } + var calcAggregationsPerComponent = function(flattenedArray){ + + return flattenedArray.reduce(function(acc, val) { + if (val.value > acc[val.component][aggregation.MAX]) { + acc[val.component][aggregation.MAX] = val.value; + } + if (val.value < acc[val.component][aggregation.MIN]) { + acc[val.component][aggregation.MIN] = val.value; + } + acc[val.component][aggregation.COUNT]++; + acc[val.component][aggregation.SUM] += val.value; + acc[val.component][aggregation.SUMOFSQUARES] += val.value * val.value; + return acc; + }, [[Number.MIN_VALUE, Number.MAX_VALUE, 0, 0, 0], [Number.MIN_VALUE, Number.MAX_VALUE, 0, 0, 0]]) + } //********************* Main Object *****************// //---------------------------------------------------// @@ -493,6 +527,132 @@ var attrEqual = function(dataValue, element) { }); }, + "receiveDataPointsWithSelectedAttributes": function(done) { + var flattenedDataValues = flattenArray(dataValues4); + promtests.searchDataAdvanced(dataValues4Time, -1, deviceToken, accountId, deviceId, [componentId[1]], false, ["key1"], undefined, undefined) + .then((result) => { + if (result.data[0].components.length != 1) done("Wrong number of point series!"); + var compIndex = 0; + + var resultObjects = result.data[0].components[compIndex].samples.map( + (element) => + createObjectFromData(element, result.data[0].components[compIndex].samplesHeader) + ); + var comparisonResult = comparePoints(flattenedDataValues, resultObjects, true); + if (comparisonResult !== true) { + done(comparisonResult); + } else { + done(); + } + }) + .catch((err) => { + done(err); + }); + }, + "receiveDataPointsCount": function(done) { + var expectedRowCount = flattenArray(dataValues1).length + + flattenArray(dataValues2).length + + flattenArray(dataValues3).length + + flattenArray(dataValues4).length; + promtests.searchDataAdvanced(dataValues1Time, -1, deviceToken, accountId, deviceId, componentId, false, undefined, undefined, true) + .then((result) => { + if (result.data[0].components.length != 2) done("Wrong number of point series!"); + + assert.equal(result.rowCount, expectedRowCount); + done(); + }) + .catch((err) => { + done(err); + }); + }, + "receiveAggregations": function(done) { + var aggr1 = calcAggregationsPerComponent(flattenArray(dataValues1)); + var aggr2 = calcAggregationsPerComponent(flattenArray(dataValues2)); + var aggr3 = calcAggregationsPerComponent(flattenArray(dataValues3)); + var aggr4 = calcAggregationsPerComponent(flattenArray(dataValues4)); + var allAggregation = [aggr1, aggr2, aggr3, aggr4].reduce(function(acc, val){ + [0, 1].forEach(function(index){ + if (acc[index][aggregation.MAX] < val[index][aggregation.MAX]) { + acc[index][aggregation.MAX] = val[index][aggregation.MAX]; + } + if (val[index][aggregation.COUNT] + && acc[index][aggregation.MIN] > val[index][aggregation.MIN]) { + acc[index][aggregation.MIN] = val[index][aggregation.MIN]; + } + acc[index][aggregation.COUNT] += 
val[index][aggregation.COUNT]; + acc[index][aggregation.SUM] += val[index][aggregation.SUM]; + acc[index][aggregation.SUMOFSQUARES] += val[index][aggregation.SUMOFSQUARES]; + }); + return acc; + },[[Number.MIN_VALUE, Number.MAX_VALUE, 0, 0, 0], [Number.MIN_VALUE, Number.MAX_VALUE, 0, 0, 0]]) + + promtests.searchDataAdvanced(dataValues1Time, -1, deviceToken, accountId, deviceId, componentId, false, undefined, "only", false) + .then((result) => { + if (result.data[0].components.length != 2) done("Wrong number of point series!"); + var mapping = [0, 1]; + if (result.data[0].components[0].componentId !== componentId[0]) { + mapping = [1, 0]; + } + [0, 1].forEach(function(index){ + assert.equal(allAggregation[index][aggregation.MAX], result.data[0].components[mapping[index]].max); + assert.equal(allAggregation[index][aggregation.MIN], result.data[0].components[mapping[index]].min); + assert.equal(allAggregation[index][aggregation.COUNT], result.data[0].components[mapping[index]].count); + assert.equal(allAggregation[index][aggregation.SUM], result.data[0].components[mapping[index]].sum); + assert.equal(allAggregation[index][aggregation.SUMOFSQUARES], result.data[0].components[mapping[index]].sumOfSquares); + }) + done(); + }) + .catch((err) => { + done(err); + }); + }, + "receiveSubset": function(done) { + promtests.searchDataAdvanced(dataValues2Time + 30, dataValues2Time + 60, deviceToken, accountId, deviceId, [componentId[0]], false, undefined, undefined, false) + .then((result) => { + if (result.data[0].components.length != 1) done("Wrong number of point series!"); + assert.equal(result.rowCount, 3); + done(); + }) + .catch((err) => { + done(err); + }); + }, + "sendMaxAmountOfSamples": function(done) { + var dataList = []; + + for (var i = 0; i < MAX_SAMPLES; i++){ + var ts = (i + 1) * 1000000 + var obj = { + component:0, + ts: ts, + value: i + } + dataList.push(obj); + } + promtests.submitDataList(dataList, deviceToken, accountId, deviceId, componentId) + .then(() => { + done() + }) + .catch((err) => { + done(err); + }); + }, + "receiveMaxAmountOfSamples": function(done) { + promtests.searchDataAdvanced(1000000, MAX_SAMPLES * 1000000, deviceToken, accountId, deviceId, [componentId[0]], false, undefined, undefined, false) + .then((result) => { + if (result.data[0].components.length != 1) done("Wrong number of point series!"); + assert.equal(result.rowCount, MAX_SAMPLES); + var samples = result.data[0].components[0].samples; + samples.forEach(function(element, i){ + assert.equal(element[1], i); + assert.equal(element[0], (i + 1) * 1000000); + }) + done(); + }) + .catch((err) => { + done(err); + }); + }, "cleanup": function(done) { } }; }; @@ -505,7 +665,13 @@ var descriptions = { "sendDataPointsWithLoc": "Sending data points with location metadata", "receiveDataPointsWithLoc": "Receiving data points with location metadata", "sendDataPointsWithAttributes": "Sending data points with attributes", - "receiveDataPointsWithAttributes": "Receiving data points with attributes", + "receiveDataPointsWithAttributes": "Receiving data points with all attributes", + "receiveDataPointsCount": "Receive only count of points", + "receiveAggregations": "Receive Max, Min, etc. 
aggregations", + "receiveSubset": "receive subset based on timestamps", + "sendMaxAmountOfSamples": "Send maximal allowed samples per request", + "receiveMaxAmountOfSamples": "Receive maximal allowed samples per request", + "receiveDataPointsWithSelectedAttributes": "Receiving data points with selected attributes", "cleanup": "Cleanup components, commands, rules created for subtest" }; From 22c14198d56c536fdbdc2a7d87fa47fb2860e015 Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Tue, 11 Dec 2018 16:54:46 +0100 Subject: [PATCH 41/68] Added vegeta to debugger container --- docker-debugger/Dockerfile | 6 ++++-- setup-environment.example.sh | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/docker-debugger/Dockerfile b/docker-debugger/Dockerfile index aabaab1e..892d099d 100644 --- a/docker-debugger/Dockerfile +++ b/docker-debugger/Dockerfile @@ -10,14 +10,16 @@ RUN apt-get update RUN apt-get install -y kubectl RUN npm install --global n -RUN n 6 - +RUN n 8 RUN pip3 install locustio oisp ENV OISP_REMOTE https://github.com/Open-IoT-Service-Platform/platform-launcher.git RUN mkdir /home/platform-launcher +RUN wget https://github.com/tsenart/vegeta/releases/download/cli%2Fv12.1.0/vegeta-12.1.0-linux-amd64.tar.gz && \ + tar -xzvf vegeta-12.1.0-linux-amd64.tar.gz && \ + cp vegeta /usr/bin/vegeta WORKDIR /home/platform-launcher diff --git a/setup-environment.example.sh b/setup-environment.example.sh index bfe4b3ac..6e18bf8f 100755 --- a/setup-environment.example.sh +++ b/setup-environment.example.sh @@ -252,7 +252,7 @@ export OISP_ZOOKEEPER_CONFIG=\ '{ "zkCluster": "'${ZOOKEEPER_KAFKA}:${ZOOKEEPER_KAFKA_PORT}'", "zkNode": "/tmp" -'} +}' export OISP_KERBEROS_CONFIG=\ '{ From f1ea3f75f26c3df4ecbe6c1a22911dedee2ea76f Mon Sep 17 00:00:00 2001 From: ogz Date: Wed, 21 Nov 2018 19:24:56 +0300 Subject: [PATCH 42/68] OISP-Frontend configuration independent of VCAP --- docker-compose.yml | 18 ++++++-- setup-environment.example.sh | 81 ++++++++++++++++++++++++++++++++++++ 2 files changed, 95 insertions(+), 4 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 23728e44..17cbb1c4 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -175,12 +175,22 @@ services: - websocket-server - backend - kafka - command: ./wait-for-it.sh postgres:5432 -t 300 -- ./wait-for-it.sh redis:6379 -t 300 -- ./wait-for-it.sh kafka:9092 -t 300 -- ./scripts/wait-for-heartbeat.sh backend websocket-server -- ./scripts/docker-start.sh ${RATE_LIMIT} + command: ./wait-for-it.sh postgres:5432 -t 300 -- ./wait-for-it.sh redis:6379 -t 300 -- ./wait-for-it.sh kafka:9092 -t 300 -- ./scripts/wait-for-heartbeat.sh backend websocket-server -- ./scripts/docker-start.sh volumes: - ./data/keys:/app/keys environment: - - VCAP_SERVICES=${VCAP_SERVICES} - - VCAP_APPLICATION=${VCAP_APPLICATION} + - OISP_FRONTEND_CONFIG=${OISP_FRONTEND_CONFIG} + - OISP_POSTGRES_CONFIG=${OISP_POSTGRES_CONFIG} + - OISP_REDIS_CONFIG=${OISP_REDIS_CONFIG} + - OISP_KAFKA_CONFIG=${OISP_KAFKA_CONFIG} + - OISP_SMTP_CONFIG=${OISP_SMTP_CONFIG} + - OISP_DASHBOARDSECURITY_CONFIG=${OISP_DASHBOARDSECURITY_CONFIG} + - OISP_GATEWAY_CONFIG=${OISP_GATEWAY_CONFIG} + - OISP_BACKENDHOST_CONFIG=${OISP_BACKENDHOST_CONFIG} + - OISP_WEBSOCKETUSER_CONFIG=${OISP_WEBSOCKETUSER_CONFIG} + - OISP_RULEENGINE_CONFIG=${OISP_RULEENGINE_CONFIG} + - OISP_MAIL_CONFIG=${OISP_MAIL_CONFIG} + - RATE_LIMIT=${RATE_LIMIT} - TEST=${TEST} - NODE_ENV=local backend: @@ -239,4 +249,4 @@ services: - 2181:2181 environment: ZOO_MY_ID: 1 - ZOO_SERVERS: 
server.1=0.0.0.0:2888:3888 \ No newline at end of file + ZOO_SERVERS: server.1=0.0.0.0:2888:3888 diff --git a/setup-environment.example.sh b/setup-environment.example.sh index 6e18bf8f..341b2dcd 100755 --- a/setup-environment.example.sh +++ b/setup-environment.example.sh @@ -235,6 +235,19 @@ export OISP_BACKEND_CONFIG=\ "hbaseConfig": "@@OISP_HBASE_CONFIG" }' +export OISP_FRONTEND_CONFIG=\ +'{ + "postgresConfig": "@@OISP_POSTGRES_CONFIG", + "redisConfig": "@@OISP_REDIS_CONFIG", + "kafkaConfig": "@@OISP_KAFKA_CONFIG", + "smtpConfig": "@@OISP_SMTP_CONFIG", + "dashboardSecurityConfig": "@@OISP_DASHBOARDSECURITY_CONFIG", + "backendHostConfig": "@@OISP_BACKENDHOST_CONFIG", + "websocketUserConfig": "@@OISP_WEBSOCKETUSER_CONFIG", + "mailConfig": "@@OISP_MAIL_CONFIG", + "ruleEngineConfig": "@@OISP_RULEENGINE_CONFIG", + "gatewayConfig": "@@OISP_GATEWAY_CONFIG" +}' export OISP_KAFKA_CONFIG=\ '{ @@ -276,3 +289,71 @@ export OISP_HADOOP_PROPERTIES=\ "hbase.zookeeper.property.clientPort": "'$ZOOKEEPER_HBASE_PORT'", "hbase.zookeeper.quorum": "'$ZOOKEEPER_HBASE'" }' + +export OISP_POSTGRES_CONFIG=\ +'{ + "dbname": "'$POSTGRES_DB'", + "hostname": "'$POSTGRES'", + "password": "'$POSTGRES_PASSWORD'", + "port": "'$POSTGRES_PORT'", + "username": "'$POSTGRES_USERNAME'" +}' + +export OISP_REDIS_CONFIG=\ +'{ + "hostname": "'$REDIS'", + "port": "'$REDIS_PORT'", + "password": "" +}' + +export OISP_SMTP_CONFIG=\ +'{ + "host": "'$SMTP_HOST'", + "port": "'$SMTP_PORT'", + "protocol": "smtp", + "username": "'$SMTP_USERNAME'", + "password": "'$SMTP_PASSWORD'" +}' + +export OISP_DASHBOARDSECURITY_CONFIG=\ +'{ + "captcha_test_code": "s09ef48213s8fe8fw8rwer5489wr8wd5", + "interaction_token_permision_key": "password", + "private_pem_path": "./keys/private.pem", + "public_pem_path": "./keys/public.pem" +}' + +export OISP_GATEWAY_CONFIG=\ +'{ + "password": "7d501829lhbl1or0bb1784462c97bcad6", + "username": "gateway@intel.com" +}' + +export OISP_BACKENDHOST_CONFIG=\ +'{ + "deviceMeasurementTableName": "LOCAL-AA-BACKEND_DEVICE_MEASUREMENT", + "host": "http://'$BACKEND'" +}' + +export OISP_MAIL_CONFIG=\ +'{ + "sender": "test.sender@streammyiot.com" +}' + +export OISP_RULEENGINE_CONFIG=\ +'{ + "password": "7d501829lhbl1or0bb1784462c97bcad6", + "username": "rule_engine@intel.com" +}' + +export OISP_GATEWAY_CONFIG=\ +'{ + "password": "7d501829lhbl1or0bb1784462c97bcad6", + "username": "gateway@intel.com" +}' + +export OISP_WEBSOCKETUSER_CONFIG=\ +'{ + "password": "AgjY7H3eXztyA6AmNjI2rte446gdttgattwRRF61", + "username": "api_actuator" +}' \ No newline at end of file From ac20e5385c43bebc5799b3127b69820652dff387 Mon Sep 17 00:00:00 2001 From: ogz Date: Thu, 22 Nov 2018 20:13:24 +0300 Subject: [PATCH 43/68] OISP-Websocket-Server configuration independent of VCAP --- docker-compose.yml | 9 +++++---- setup-environment.example.sh | 10 +++++++++- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 17cbb1c4..0579a482 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -155,8 +155,10 @@ services: - ./data/keys:/app/keys working_dir: /app environment: - - VCAP_SERVICES=${VCAP_SERVICES} - - VCAP_APPLICATION=${VCAP_APPLICATION} + - OISP_WEBSOCKET_SERVER_CONFIG=${OISP_WEBSOCKET_SERVER_CONFIG} + - OISP_POSTGRES_CONFIG=${OISP_POSTGRES_CONFIG} + - OISP_KAFKA_CONFIG=${OISP_KAFKA_CONFIG} + - OISP_WEBSOCKETUSER_CONFIG=${OISP_WEBSOCKETUSER_CONFIG} - TEST=${TEST} - NODE_ENV=local command: ./wait-for-it.sh postgres:5432 -t 300 -- ./wait-for-it.sh kafka:9092 -t 300 -- 
./scripts/docker-start.sh @@ -175,7 +177,7 @@ services: - websocket-server - backend - kafka - command: ./wait-for-it.sh postgres:5432 -t 300 -- ./wait-for-it.sh redis:6379 -t 300 -- ./wait-for-it.sh kafka:9092 -t 300 -- ./scripts/wait-for-heartbeat.sh backend websocket-server -- ./scripts/docker-start.sh + command: ./wait-for-it.sh postgres:5432 -t 300 -- ./wait-for-it.sh redis:6379 -t 300 -- ./wait-for-it.sh kafka:9092 -t 300 -- ./scripts/wait-for-heartbeat.sh backend websocket-server -- ./scripts/docker-start.sh ${RATE_LIMIT} volumes: - ./data/keys:/app/keys environment: @@ -190,7 +192,6 @@ services: - OISP_WEBSOCKETUSER_CONFIG=${OISP_WEBSOCKETUSER_CONFIG} - OISP_RULEENGINE_CONFIG=${OISP_RULEENGINE_CONFIG} - OISP_MAIL_CONFIG=${OISP_MAIL_CONFIG} - - RATE_LIMIT=${RATE_LIMIT} - TEST=${TEST} - NODE_ENV=local backend: diff --git a/setup-environment.example.sh b/setup-environment.example.sh index 341b2dcd..1ea94123 100755 --- a/setup-environment.example.sh +++ b/setup-environment.example.sh @@ -249,6 +249,14 @@ export OISP_FRONTEND_CONFIG=\ "gatewayConfig": "@@OISP_GATEWAY_CONFIG" }' +export OISP_WEBSOCKET_SERVER_CONFIG=\ +'{ + "postgresConfig": "@@OISP_POSTGRES_CONFIG", + "websocketUserConfig": "@@OISP_WEBSOCKETUSER_CONFIG", + "kafkaConfig": "@@OISP_KAFKA_CONFIG", + "uri": "" +}' + export OISP_KAFKA_CONFIG=\ '{ "uri": "'$KAFKA'", @@ -356,4 +364,4 @@ export OISP_WEBSOCKETUSER_CONFIG=\ '{ "password": "AgjY7H3eXztyA6AmNjI2rte446gdttgattwRRF61", "username": "api_actuator" -}' \ No newline at end of file +}' From 3376f6c91f06e76a1ef9c99697a64a2e0341bf4d Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Tue, 11 Dec 2018 17:22:15 +0100 Subject: [PATCH 44/68] Added jaeger --- docker-compose.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docker-compose.yml b/docker-compose.yml index 0579a482..11b197d1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -251,3 +251,15 @@ services: environment: ZOO_MY_ID: 1 ZOO_SERVERS: server.1=0.0.0.0:2888:3888 + jaeger: + image: jaegertracing/all-in-one:1.8 + ports: + - 5775:5775/udp + - 6831:6831/udp + - 6832:6832/udp + - 5778:5778 + - 16686:16686 + - 14268:14268 + - 9411:9411 + environment: + COLLECTOR_ZIPKIN_HTTP_PORT: 9411 From ec0195e065bd4978867a7afaac0ebf71f6d35a56 Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Mon, 17 Dec 2018 11:41:03 +0100 Subject: [PATCH 45/68] tests/Makefile: Adapt k8s tests After Jaeger agent container has been added to k8s (for monitoring) there are multiple containers in the frontend, and it is necessary to specify that the test command should run in 'dashboard' Signed-off-by: Ali Rasim Kocal --- tests/Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/Makefile b/tests/Makefile index 7e4fb1bd..2a6de416 100644 --- a/tests/Makefile +++ b/tests/Makefile @@ -60,10 +60,10 @@ ifeq ($(TESTING_PLATFORM), docker) @. 
../setup-environment.sh && docker exec -i "$${COMPOSE_PROJECT_NAME}_frontend_1" node /app/admin addUser $(USERNAME) $(PASSWORD) $(ROLE) > /dev/null else @$(call msg,"Resetting database ..."); - @kubectl exec -i $(DASHBOARD_POD) -- node /app/admin resetDB + @kubectl exec -i $(DASHBOARD_POD) -c dashboard -- node /app/admin resetDB @$(call msg,"Adding a user for testing ..."); - @kubectl exec -i $(DASHBOARD_POD) -- node /app/admin addUser $(USERNAME) $(PASSWORD) $(ROLE) > /dev/null + @kubectl exec -i $(DASHBOARD_POD) -c dashboard -- node /app/admin addUser $(USERNAME) $(PASSWORD) $(ROLE) > /dev/null endif @$(call msg,"Starting the e2e testing ..."); From 4b9ea9773f2c9b575f8e5f765c19aa716b075ccc Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Sat, 8 Dec 2018 19:09:53 +0100 Subject: [PATCH 46/68] Added openTSDB container with automated creation of needed HBase tables --- docker-compose.yml | 18 +++ docker-hbase/Dockerfile | 2 +- docker-opentsdb/Dockerfile | 38 +++++ docker-opentsdb/conf/hbase-site.xml | 10 ++ docker-opentsdb/scripts/tsdb-startup.sh | 31 +++++ docker-opentsdb/scripts/wait-for-it.sh | 177 ++++++++++++++++++++++++ setup-environment.example.sh | 17 ++- 7 files changed, 288 insertions(+), 5 deletions(-) create mode 100644 docker-opentsdb/Dockerfile create mode 100644 docker-opentsdb/conf/hbase-site.xml create mode 100644 docker-opentsdb/scripts/tsdb-startup.sh create mode 100755 docker-opentsdb/scripts/wait-for-it.sh diff --git a/docker-compose.yml b/docker-compose.yml index 11b197d1..62121e0e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -263,3 +263,21 @@ services: - 9411:9411 environment: COLLECTOR_ZIPKIN_HTTP_PORT: 9411 + opentsdb: + image: oisp/opentsdb:${DOCKER_TAG} + build: + context: ./docker-opentsdb + labels: + - oisp=true + - ois.git_commit=${GIT_COMMIT_OPENTSDB} + ports: + - 4242:4242 + depends_on: + - hbase-master + - zookeeper + command: /opt/wait-for-it.sh hbase-master:9090 -t 300 -- /opt/wait-for-it.sh zookeeper:2181 -t 300 -- /opt/tsdb-startup.sh + environment: + - OISP_ZOOKEEPER_CONFIG=${OISP_ZOOKEEPER_CONFIG} + - OISP_OPENTSDB_CONFIG=${OISP_OPENTSDB_CONFIG} + - OISP_HADOOP_PROPERTIES=${OISP_HADOOP_PROPERTIES} + diff --git a/docker-hbase/Dockerfile b/docker-hbase/Dockerfile index efc4b6b3..680f5134 100644 --- a/docker-hbase/Dockerfile +++ b/docker-hbase/Dockerfile @@ -76,4 +76,4 @@ EXPOSE 2181 # HBase Master web UI at :16010/master-status; ZK at :16010/zk.jsp EXPOSE 16010 -ENTRYPOINT ["/opt/entrypoint.sh"] +#ENTRYPOINT ["/opt/entrypoint.sh"] diff --git a/docker-opentsdb/Dockerfile b/docker-opentsdb/Dockerfile new file mode 100644 index 00000000..91009fac --- /dev/null +++ b/docker-opentsdb/Dockerfile @@ -0,0 +1,38 @@ +# +# Copyright (c) 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +FROM oisp/hbase:latest + +# avoid debconf and initrd +ENV DEBIAN_FRONTEND noninteractive +ENV INITRD No + +ENV OPENTSDB_VERSION 2.3.1 + +ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64 + +RUN apt-get update -qq && apt-get upgrade -y +RUN apt-get install -y --no-install-recommends wget jq supervisor openjdk-8-jdk-headless dh-autoreconf +RUN echo https://github.com/OpenTSDB/opentsdb/releases/download/v${OPENTSDB_VERSION}/opentsdb-${OPENTSDB_VERSION}.tar.gz +RUN cd /opt && \ + wget -q https://github.com/OpenTSDB/opentsdb/releases/download/v${OPENTSDB_VERSION}/opentsdb-${OPENTSDB_VERSION}.tar.gz +RUN cd /opt && tar xzf opentsdb-${OPENTSDB_VERSION}.tar.gz && \ + mv opentsdb-${OPENTSDB_VERSION} /opt/opentsdb +RUN cd /opt/opentsdb && ./bootstrap && ./configure --prefix=/usr && make install +ADD ./scripts/* /opt/ +ADD ./conf/hbase-site.xml /opt/hbase/conf/ +RUN chmod 550 /opt/*.sh + +EXPOSE 4242 diff --git a/docker-opentsdb/conf/hbase-site.xml b/docker-opentsdb/conf/hbase-site.xml new file mode 100644 index 00000000..d159eb66 --- /dev/null +++ b/docker-opentsdb/conf/hbase-site.xml @@ -0,0 +1,10 @@ + + + + + hbase.zookeeper.quorum + zookeeper-hostname + + + + diff --git a/docker-opentsdb/scripts/tsdb-startup.sh b/docker-opentsdb/scripts/tsdb-startup.sh new file mode 100644 index 00000000..b015b33c --- /dev/null +++ b/docker-opentsdb/scripts/tsdb-startup.sh @@ -0,0 +1,31 @@ +#!/bin/bash +# Copyright (c) 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +echo "Starting $0" +ZKCLUSTER=$(echo ${OISP_ZOOKEEPER_CONFIG} | jq '.zkCluster' | tr -d '"') +ZKBASE=$(echo ${OISP_HADOOP_PROPERTIES} | jq '.["hbase.rootdir"]' | tr -d '"') +PORT=$(echo ${OISP_OPENTSDB_CONFIG} | jq '.port') +echo "Found ZKCLUSTER=${ZKCLUSTER} ZKBASE=${ZKBASE} PORT=${PORT}" +export ZOOKEEPER=${ZKCLUSTER} +export HDFS_NAMENODE=not-needed # not needed +/opt/replace-hostname.sh +export HBASE_HOME=/opt/hbase +export COMPRESSION=NONE +echo status | /opt/hbase/bin/hbase shell +sleep 10 +/opt/opentsdb/src/create_table.sh +echo "Result $?" 
+echo tsdb tsd $@ --auto-metric --cachedir=/tmp --staticroot=/usr/share/opentsdb/static/ --zkquorum=${ZKCLUSTER} --zkbasedir=${ZKBASE} --port=${PORT} +tsdb tsd $@ --auto-metric --cachedir=/tmp --staticroot=/usr/share/opentsdb/static/ --zkquorum=${ZKCLUSTER} --zkbasedir=${ZKBASE} --port=${PORT} diff --git a/docker-opentsdb/scripts/wait-for-it.sh b/docker-opentsdb/scripts/wait-for-it.sh new file mode 100755 index 00000000..401a6f17 --- /dev/null +++ b/docker-opentsdb/scripts/wait-for-it.sh @@ -0,0 +1,177 @@ +#!/usr/bin/env bash +# Use this script to test if a given TCP host/port are available + +cmdname=$(basename $0) + +echoerr() { if [[ $QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } + +usage() +{ + cat << USAGE >&2 +Usage: + $cmdname host:port [-s] [-t timeout] [-- command args] + -h HOST | --host=HOST Host or IP under test + -p PORT | --port=PORT TCP port under test + Alternatively, you specify the host and port as host:port + -s | --strict Only execute subcommand if the test succeeds + -q | --quiet Don't output any status messages + -t TIMEOUT | --timeout=TIMEOUT + Timeout in seconds, zero for no timeout + -- COMMAND ARGS Execute command with args after the test finishes +USAGE + exit 1 +} + +wait_for() +{ + if [[ $TIMEOUT -gt 0 ]]; then + echoerr "$cmdname: waiting $TIMEOUT seconds for $HOST:$PORT" + else + echoerr "$cmdname: waiting for $HOST:$PORT without a timeout" + fi + start_ts=$(date +%s) + while : + do + if [[ $ISBUSY -eq 1 ]]; then + nc -z $HOST $PORT + result=$? + else + (echo > /dev/tcp/$HOST/$PORT) >/dev/null 2>&1 + result=$? + fi + if [[ $result -eq 0 ]]; then + end_ts=$(date +%s) + echoerr "$cmdname: $HOST:$PORT is available after $((end_ts - start_ts)) seconds" + break + fi + sleep 1 + done + return $result +} + +wait_for_wrapper() +{ + # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 + if [[ $QUIET -eq 1 ]]; then + timeout $BUSYTIMEFLAG $TIMEOUT $0 --quiet --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & + else + timeout $BUSYTIMEFLAG $TIMEOUT $0 --child --host=$HOST --port=$PORT --timeout=$TIMEOUT & + fi + PID=$! + trap "kill -INT -$PID" INT + wait $PID + RESULT=$? + if [[ $RESULT -ne 0 ]]; then + echoerr "$cmdname: timeout occurred after waiting $TIMEOUT seconds for $HOST:$PORT" + fi + return $RESULT +} + +# process arguments +while [[ $# -gt 0 ]] +do + case "$1" in + *:* ) + hostport=(${1//:/ }) + HOST=${hostport[0]} + PORT=${hostport[1]} + shift 1 + ;; + --child) + CHILD=1 + shift 1 + ;; + -q | --quiet) + QUIET=1 + shift 1 + ;; + -s | --strict) + STRICT=1 + shift 1 + ;; + -h) + HOST="$2" + if [[ $HOST == "" ]]; then break; fi + shift 2 + ;; + --host=*) + HOST="${1#*=}" + shift 1 + ;; + -p) + PORT="$2" + if [[ $PORT == "" ]]; then break; fi + shift 2 + ;; + --port=*) + PORT="${1#*=}" + shift 1 + ;; + -t) + TIMEOUT="$2" + if [[ $TIMEOUT == "" ]]; then break; fi + shift 2 + ;; + --timeout=*) + TIMEOUT="${1#*=}" + shift 1 + ;; + --) + shift + CLI="$@" + break + ;; + --help) + usage + ;; + *) + echoerr "Unknown argument: $1" + usage + ;; + esac +done + +if [[ "$HOST" == "" || "$PORT" == "" ]]; then + echoerr "Error: you need to provide a host and port to test." + usage +fi + +TIMEOUT=${TIMEOUT:-15} +STRICT=${STRICT:-0} +CHILD=${CHILD:-0} +QUIET=${QUIET:-0} + +# check to see if timeout is from busybox? +# check to see if timeout is from busybox? 
+TIMEOUT_PATH=$(realpath $(which timeout)) +if [[ $TIMEOUT_PATH =~ "busybox" ]]; then + ISBUSY=1 + BUSYTIMEFLAG="-t" +else + ISBUSY=0 + BUSYTIMEFLAG="" +fi + +if [[ $CHILD -gt 0 ]]; then + wait_for + RESULT=$? + exit $RESULT +else + if [[ $TIMEOUT -gt 0 ]]; then + wait_for_wrapper + RESULT=$? + else + wait_for + RESULT=$? + fi +fi + +if [[ $CLI != "" ]]; then + if [[ $RESULT -ne 0 && $STRICT -eq 1 ]]; then + echoerr "$cmdname: strict mode, refusing to execute subprocess" + exit $RESULT + fi + exec $CLI +else + exit $RESULT +fi diff --git a/setup-environment.example.sh b/setup-environment.example.sh index 1ea94123..a37f4d62 100755 --- a/setup-environment.example.sh +++ b/setup-environment.example.sh @@ -20,6 +20,8 @@ export ZOOKEEPER_KAFKA='zookeeper' export ZOOKEEPER_KAFKA_PORT='2181' export ZOOKEEPER_HBASE='zookeeper' export ZOOKEEPER_HBASE_PORT='2181' + +HBASE_ROOTDIR=/hbase export POSTGRES='postgres' export POSTGRES_DB_REGULAR="iot" export POSTGRES_DB=${POSTGRES_DB_REGULAR} # postgres needs always regular DB name for initialization. It automatically initiates the test DB as well. @@ -39,6 +41,8 @@ export NGINX='nginx' export REDIS='redis' export REDIS_PORT='6379' +OPENTSDB_PORT=4242 + export SMTP_HOST="${SMTP_HOST:-auth.smtp.1and1.co.uk}" export SMTP_PORT="${SMTP_PORT:-587}" export SMTP_USERNAME="${SMTP_USERNAME:-test.sender@streammyiot.com}" @@ -271,9 +275,8 @@ export OISP_KAFKA_CONFIG=\ export OISP_ZOOKEEPER_CONFIG=\ '{ - "zkCluster": "'${ZOOKEEPER_KAFKA}:${ZOOKEEPER_KAFKA_PORT}'", - "zkNode": "/tmp" -}' + "zkCluster": "'${ZOOKEEPER_KAFKA}:${ZOOKEEPER_KAFKA_PORT}'" +'} export OISP_KERBEROS_CONFIG=\ '{ @@ -295,7 +298,13 @@ export OISP_HADOOP_PROPERTIES=\ "hbase.security.authentication": "simple", "ha.zookeeper.quorum": "'$ZOOKEEPER_HBASE'", "hbase.zookeeper.property.clientPort": "'$ZOOKEEPER_HBASE_PORT'", - "hbase.zookeeper.quorum": "'$ZOOKEEPER_HBASE'" + "hbase.zookeeper.quorum": "'$ZOOKEEPER_HBASE'", + "hbase.rootdir": "'$HBASE_ROOTDIR'" +}' + +export OISP_OPENTSDB_CONFIG=\ +'{ + "port": '$OPENTSDB_PORT' }' export OISP_POSTGRES_CONFIG=\ From 43234ea91070679b84eb272ed0741eb575bef703 Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Tue, 11 Dec 2018 23:03:28 +0100 Subject: [PATCH 47/68] E2E-test/data-sending-subtests * Fixed receive tests: comparison of number of returned points with expected number of points * Comparison of float values is testing abs difference larger than MIN_NUMBER rather than equality * Added configuration for opentsdb and moved the testing timestamps up to 100000000000 as base * Implemented cleanup of subtest * Added 2s between sending and receiving * setup-environment.example.sh: Added description how to configure tsdbName in OISP_BACKEND_CONFIG --- docker-compose.yml | 2 +- docker-opentsdb/scripts/tsdb-startup.sh | 29 +++++- setup-environment.example.sh | 21 ++++- tests/oisp-tests.js | 19 +++- tests/subtests/data-sending-tests.js | 119 +++++++++++++----------- 5 files changed, 132 insertions(+), 58 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 62121e0e..1b2cea49 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -215,6 +215,7 @@ services: - OISP_KERBEROS_CONFIG=${OISP_KERBEROS_CONFIG} - OISP_HBASE_CONFIG=${OISP_HBASE_CONFIG} - OISP_HADOOP_PROPERTIES=${OISP_HADOOP_PROPERTIES} + - OISP_TSDB_PROPERTIES=${OISP_TSDB_PROPERTIES} hdfs-namenode: image: singularities/hadoop command: start-hadoop namenode @@ -280,4 +281,3 @@ services: - OISP_ZOOKEEPER_CONFIG=${OISP_ZOOKEEPER_CONFIG} - OISP_OPENTSDB_CONFIG=${OISP_OPENTSDB_CONFIG} - 
OISP_HADOOP_PROPERTIES=${OISP_HADOOP_PROPERTIES} - diff --git a/docker-opentsdb/scripts/tsdb-startup.sh b/docker-opentsdb/scripts/tsdb-startup.sh index b015b33c..99db6bb5 100644 --- a/docker-opentsdb/scripts/tsdb-startup.sh +++ b/docker-opentsdb/scripts/tsdb-startup.sh @@ -13,6 +13,25 @@ # See the License for the specific language governing permissions and # limitations under the License. +CONFIG_FILE=/etc/opentsdb/opentsdb.conf +CONFIG_SRC=/opt/opentsdb/src/opentsdb.conf +mkdir -p /etc/opentsdb +cp ${CONFIG_SRC} ${CONFIG_FILE} + +function replace_config { + config_line=$1 + replacement="${config_line} = ${2//\//\\/}" + line_start=$3 + echo sed -i "s/${line_start}.*${config_line} =.*$/${replacement}/" ${CONFIG_FILE} + sed -i "s/${line_start}.*${config_line} =.*$/${replacement}/" ${CONFIG_FILE} +} + +function add_config { + config_line=$1 + replacement="${config_line} = $2" + echo ${replacement} >> ${CONFIG_FILE} +} + echo "Starting $0" ZKCLUSTER=$(echo ${OISP_ZOOKEEPER_CONFIG} | jq '.zkCluster' | tr -d '"') ZKBASE=$(echo ${OISP_HADOOP_PROPERTIES} | jq '.["hbase.rootdir"]' | tr -d '"') @@ -25,7 +44,13 @@ export HBASE_HOME=/opt/hbase export COMPRESSION=NONE echo status | /opt/hbase/bin/hbase shell sleep 10 +# update the config file +replace_config "tsd.storage.hbase.zk_basedir" ${ZKBASE} "#" +replace_config "tsd.storage.hbase.zk_quorum" ${ZKCLUSTER} "#" +replace_config "tsd.network.port" ${PORT} "" +replace_config "tsd.core.auto_create_metrics" "true" "#" +add_config "tsd.http.request.enable_chunked" "true" + /opt/opentsdb/src/create_table.sh echo "Result $?" -echo tsdb tsd $@ --auto-metric --cachedir=/tmp --staticroot=/usr/share/opentsdb/static/ --zkquorum=${ZKCLUSTER} --zkbasedir=${ZKBASE} --port=${PORT} -tsdb tsd $@ --auto-metric --cachedir=/tmp --staticroot=/usr/share/opentsdb/static/ --zkquorum=${ZKCLUSTER} --zkbasedir=${ZKBASE} --port=${PORT} +tsdb tsd $@ --auto-metric --cachedir=/tmp --staticroot=/usr/share/opentsdb/static/ diff --git a/setup-environment.example.sh b/setup-environment.example.sh index a37f4d62..5369e8ed 100755 --- a/setup-environment.example.sh +++ b/setup-environment.example.sh @@ -21,7 +21,7 @@ export ZOOKEEPER_KAFKA_PORT='2181' export ZOOKEEPER_HBASE='zookeeper' export ZOOKEEPER_HBASE_PORT='2181' -HBASE_ROOTDIR=/hbase +HBASE_ROOTDIR='/hbase' export POSTGRES='postgres' export POSTGRES_DB_REGULAR="iot" export POSTGRES_DB=${POSTGRES_DB_REGULAR} # postgres needs always regular DB name for initialization. It automatically initiates the test DB as well. @@ -41,6 +41,7 @@ export NGINX='nginx' export REDIS='redis' export REDIS_PORT='6379' +OPENTSDB_URI='opentsdb' OPENTSDB_PORT=4242 export SMTP_HOST="${SMTP_HOST:-auth.smtp.1and1.co.uk}" @@ -50,6 +51,14 @@ export SMTP_PASSWORD="${SMTP_PASSWORD:-xxxxx}" export COMPOSE_PROJECT_NAME="oisp" + +OPENTSDB_PROPERTIES='{ + "uri": "'$OPENTSDB_URI'", + "port": "'$OPENTSDB_PORT'" +}' + + +#################### VCAP is deprecated, will be removed in next version export VCAP_SERVICES='{ "postgresql93": [{ "credentials": { @@ -220,7 +229,7 @@ export VCAP_SERVICES='{ export VCAP_APPLICATION='{ "space_name": "local" }' - +######################################### #VCAP variables are left overs from Cloud Foundry times. #To allow a transition to Kubernetes, we will define both, VCAP and regular environment variables. 
@@ -230,9 +239,17 @@ export VCAP_APPLICATION='{ #References to other environmental variables which can be parsed as OBJECTS are done with "@@OISP_*_CONFIG" #References to other environmental variables which can be parsed as MAPS (e.g. Property Maps) are done with "%%OISP_*_PROPERTIES". MAPS contain only pairs. +export OISP_TSDB_PROPERTIES='{}' #=${OPENTSDB_PROPERTIES} for openTSDB + +# tsdbName can be +# hbase +# then no tsdbProperties should be an empty class {} +# opentsdb +# then the OISP_TSDB_PROPERTIES should contain url and port for openTSDB service export OISP_BACKEND_CONFIG=\ '{ "tsdbName": "hbase", + "tsdbProperties": "%%OISP_TSDB_PROPERTIES", "kafkaConfig": "@@OISP_KAFKA_CONFIG", "zookeeperConfig": "@@OISP_ZOOKEEPER_CONFIG", "kerberosConfig": "@@OISP_KERBEROS_CONFIG", diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index ac6559e5..c0796e95 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -919,24 +919,36 @@ describe("Do data sending subtests ...".bold, test = require("./subtests/data-sending-tests").test(userToken, accountId, deviceId, deviceToken, cbManager); test.sendAggregatedDataPoints(done); }).timeout(10000); + it(descriptions.waitForBackendSynchronization,function(done) { + test.waitForBackendSynchronization(done); + }).timeout(10000); it(descriptions.receiveAggregatedDataPoints,function(done) { test.receiveAggregatedDataPoints(done); }).timeout(10000); it(descriptions.sendAggregatedMultipleDataPoints,function(done) { test.sendAggregatedMultipleDataPoints(done); }).timeout(10000); + it(descriptions.waitForBackendSynchronization,function(done) { + test.waitForBackendSynchronization(done); + }).timeout(10000); it(descriptions.receiveAggregatedMultipleDataPoints,function(done) { test.receiveAggregatedMultipleDataPoints(done); }).timeout(10000); it(descriptions.sendDataPointsWithLoc,function(done) { test.sendDataPointsWithLoc(done); }).timeout(10000); + it(descriptions.waitForBackendSynchronization,function(done) { + test.waitForBackendSynchronization(done); + }).timeout(10000); it(descriptions.receiveDataPointsWithLoc,function(done) { test.receiveDataPointsWithLoc(done); }).timeout(10000); it(descriptions.sendDataPointsWithAttributes,function(done) { test.sendDataPointsWithAttributes(done); }).timeout(10000); + it(descriptions.waitForBackendSynchronization,function(done) { + test.waitForBackendSynchronization(done); + }).timeout(10000); it(descriptions.receiveDataPointsWithAttributes,function(done) { test.receiveDataPointsWithAttributes(done); }).timeout(10000); @@ -955,9 +967,15 @@ describe("Do data sending subtests ...".bold, it(descriptions.sendMaxAmountOfSamples,function(done) { test.sendMaxAmountOfSamples(done); }).timeout(10000); + it(descriptions.waitForBackendSynchronization,function(done) { + test.waitForBackendSynchronization(done); + }).timeout(10000); it(descriptions.receiveMaxAmountOfSamples,function(done) { test.receiveMaxAmountOfSamples(done); }).timeout(10000); + it(descriptions.cleanup,function(done) { + test.cleanup(done); + }).timeout(10000); }); describe("Geting and manage alerts ... \n".bold, function(){ @@ -1443,4 +1461,3 @@ describe("change password and delete receiver ... 
\n".bold, function(){ }) }) - diff --git a/tests/subtests/data-sending-tests.js b/tests/subtests/data-sending-tests.js index 79b6f3c6..d5e94883 100644 --- a/tests/subtests/data-sending-tests.js +++ b/tests/subtests/data-sending-tests.js @@ -34,83 +34,84 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { var dataValues4Time; const MIN_NUMBER = 0.0001; const MAX_SAMPLES = 1000; + const BASE_TIMESTAMP = 1000000000000 var dataValues1 = [ [{ component: 0, value: 10.1, - ts: 1 + ts: 1 + BASE_TIMESTAMP }], [{ component: 0, value: 11.2, - ts: 2 + ts: 2 + BASE_TIMESTAMP }, { component: 0, value: 12.3, - ts: 3 + ts: 3 + BASE_TIMESTAMP }], [{ component: 0, value: 13.4, - ts: 4 + ts: 4 + BASE_TIMESTAMP }, { component: 0, value: 14.5, - ts: 5 + ts: 5 + BASE_TIMESTAMP }, { component: 0, value: 15.6, - ts: 6 + ts: 6 + BASE_TIMESTAMP } ], [{ component: 0, value: 16.7, - ts: 7 + ts: 7 + BASE_TIMESTAMP }, { component: 0, value: 17.8, - ts: 8 + ts: 8 + BASE_TIMESTAMP }, { component: 0, value: 18.9, - ts: 9 + ts: 9 + BASE_TIMESTAMP }, { component: 0, value: 20.0, - ts: 10 + ts: 10 + BASE_TIMESTAMP } ], [{ component: 0, value: 21.1, - ts: 11 + ts: 11 + BASE_TIMESTAMP }, { component: 0, value: 22.2, - ts: 12 + ts: 12 + BASE_TIMESTAMP }, { component: 0, value: 23.3, - ts: 13 + ts: 13 + BASE_TIMESTAMP }, { component: 0, value: 24.4, - ts: 14 + ts: 14 + BASE_TIMESTAMP }, { component: 0, value: 25.5, - ts: 15 + ts: 15 + BASE_TIMESTAMP } ] @@ -121,60 +122,60 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { [{ component: 0, value: 10.1, - ts: 1000 + ts: 1000 + BASE_TIMESTAMP }], [{ component: 1, value: 10, - ts: 1020 + ts: 1020 + BASE_TIMESTAMP }, { component: 0, value: 12.3, - ts: 1030 + ts: 1030 + BASE_TIMESTAMP } ], [{ component: 0, value: 13.4, - ts: 1040 + ts: 1040 + BASE_TIMESTAMP }, { component: 1, value: 20, - ts: 1050 + ts: 1050 + BASE_TIMESTAMP }, { component: 0, value: 15.6, - ts: 1060 + ts: 1060 + BASE_TIMESTAMP } ], [{ component: 1, value: 30, - ts: 1070 + ts: 1070 + BASE_TIMESTAMP }, { component: 0, value: 17.8, - ts: 1070 + ts: 1070 + BASE_TIMESTAMP }, { component: 0, value: 18.9, - ts: 1090 + ts: 1090 + BASE_TIMESTAMP }, { component: 1, value: 40, - ts: 1100 + ts: 1100 + BASE_TIMESTAMP } ], [{ component: 1, value: 50, - ts: 1170 + ts: 1170 + BASE_TIMESTAMP }] ] @@ -182,30 +183,30 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { [{ component: 0, value: 10, - ts: 10000, + ts: 10000 + BASE_TIMESTAMP, loc: [99.12345, 12.3456, 456.789] }], [{ component: 0, value: 11, - ts: 20000, + ts: 20000 + BASE_TIMESTAMP, loc: [9.8765, 432.1, 09.876] }], [{ component: 0, value: 12, - ts: 30000, + ts: 30000 + BASE_TIMESTAMP, }], [{ component: 0, value: 13, - ts: 40000, + ts: 40000 + BASE_TIMESTAMP, loc: [0.0, 0.0, 0.0] }], [{ component: 0, value: 14, - ts: 50000, + ts: 50000 + BASE_TIMESTAMP, loc: [200.345, 300.21] }] ]; @@ -214,7 +215,7 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { [{ component: 1, value: 99, - ts: 100000, + ts: 100000 + BASE_TIMESTAMP, loc: [1.2444, 10.987, 456.789], attributes: { "key1": "value1" @@ -223,7 +224,7 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { [{ component: 1, value: 98, - ts: 200000, + ts: 200000 + BASE_TIMESTAMP, attributes: { "key1": "value1", "key2": "value2", @@ -233,7 +234,7 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { [{ component: 1, value: 97, - ts: 300000, + ts: 300000 + 
BASE_TIMESTAMP, attributes: { "key3": "value1", "key4": "value2" @@ -242,13 +243,13 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { [{ component: 1, value: 96, - ts: 400000, + ts: 400000 + BASE_TIMESTAMP, loc: [0.0, 0.0, 0.0] }], [{ component: 1, value: 95, - ts: 500000, + ts: 500000 + BASE_TIMESTAMP, loc: [200.345, 300.21], attributes: { "key5": "key1" @@ -293,9 +294,9 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { return false; } } - if ((dataValue.loc[0] == undefined || (dataValue.loc[0] - Number(element.lat)) <= MIN_NUMBER) - && (dataValue.loc[1] == undefined || (dataValue.loc[1].toString() - Number(element.lon)) <= MIN_NUMBER) - && (dataValue.loc[2] == undefined || (dataValue.loc[2].toString() - Number(element.alt)) <= MIN_NUMBER)) { + if ((dataValue.loc[0] == undefined || (Math.abs(dataValue.loc[0] - Number(element.lat))) <= MIN_NUMBER) + && (dataValue.loc[1] == undefined || (Math.abs(dataValue.loc[1].toString() - Number(element.lon))) <= MIN_NUMBER) + && (dataValue.loc[2] == undefined || (Math.abs(dataValue.loc[2].toString() - Number(element.alt))) <= MIN_NUMBER)) { return true; } else { return false; @@ -322,9 +323,12 @@ var attrEqual = function(dataValue, element, onlyExistingAttr) { var result = true; var reason = ""; var onlyExistingAttr = onlyExistingAttributes == undefined ? false : onlyExistingAttributes; + if (points.length != dataValues.length) { + return "Wrong number of returned points"; + } points.forEach(function(element, index) { if ((element.ts != dataValues[index].ts) || - (element.value != dataValues[index].value) || + ((Math.abs(element.value - dataValues[index].value)) > MIN_NUMBER) || !locEqual(dataValues[index], element, onlyExistingAttr) || !attrEqual(dataValues[index], element, onlyExistingAttr)) { result = false; @@ -377,7 +381,7 @@ var attrEqual = function(dataValue, element, onlyExistingAttr) { }) .then(() => { var proms = []; - dataValues1Time = 0; + dataValues1Time = 0 + BASE_TIMESTAMP; dataValues1.forEach(function(element) { proms.push(promtests.submitDataList(element, deviceToken, accountId, deviceId, componentId)) }); @@ -405,7 +409,6 @@ var attrEqual = function(dataValue, element, onlyExistingAttr) { .catch((err) => { done(err); }); - }, "sendAggregatedMultipleDataPoints": function(done) { var proms = []; @@ -594,11 +597,11 @@ var attrEqual = function(dataValue, element, onlyExistingAttr) { mapping = [1, 0]; } [0, 1].forEach(function(index){ - assert.equal(allAggregation[index][aggregation.MAX], result.data[0].components[mapping[index]].max); - assert.equal(allAggregation[index][aggregation.MIN], result.data[0].components[mapping[index]].min); - assert.equal(allAggregation[index][aggregation.COUNT], result.data[0].components[mapping[index]].count); - assert.equal(allAggregation[index][aggregation.SUM], result.data[0].components[mapping[index]].sum); - assert.equal(allAggregation[index][aggregation.SUMOFSQUARES], result.data[0].components[mapping[index]].sumOfSquares); + assert.closeTo(allAggregation[index][aggregation.MAX], result.data[0].components[mapping[index]].max, MIN_NUMBER); + assert.closeTo(allAggregation[index][aggregation.MIN], result.data[0].components[mapping[index]].min, MIN_NUMBER); + assert.closeTo(allAggregation[index][aggregation.COUNT], result.data[0].components[mapping[index]].count, MIN_NUMBER); + assert.closeTo(allAggregation[index][aggregation.SUM], result.data[0].components[mapping[index]].sum, MIN_NUMBER); + 
assert.closeTo(allAggregation[index][aggregation.SUMOFSQUARES], result.data[0].components[mapping[index]].sumOfSquares, MIN_NUMBER); }) done(); }) @@ -621,7 +624,7 @@ var attrEqual = function(dataValue, element, onlyExistingAttr) { var dataList = []; for (var i = 0; i < MAX_SAMPLES; i++){ - var ts = (i + 1) * 1000000 + var ts = (i + 1) * 1000000 + BASE_TIMESTAMP var obj = { component:0, ts: ts, @@ -638,14 +641,14 @@ var attrEqual = function(dataValue, element, onlyExistingAttr) { }); }, "receiveMaxAmountOfSamples": function(done) { - promtests.searchDataAdvanced(1000000, MAX_SAMPLES * 1000000, deviceToken, accountId, deviceId, [componentId[0]], false, undefined, undefined, false) + promtests.searchDataAdvanced(BASE_TIMESTAMP + 1000000, MAX_SAMPLES * 1000000 + BASE_TIMESTAMP, deviceToken, accountId, deviceId, [componentId[0]], false, undefined, undefined, false) .then((result) => { if (result.data[0].components.length != 1) done("Wrong number of point series!"); assert.equal(result.rowCount, MAX_SAMPLES); var samples = result.data[0].components[0].samples; samples.forEach(function(element, i){ assert.equal(element[1], i); - assert.equal(element[0], (i + 1) * 1000000); + assert.equal(element[0], (i + 1) * 1000000 + BASE_TIMESTAMP); }) done(); }) @@ -653,8 +656,19 @@ var attrEqual = function(dataValue, element, onlyExistingAttr) { done(err); }); }, - "cleanup": function(done) { } - }; + "waitForBackendSynchronization": function(done) { + setTimeout(done, 2000); + + }, + "cleanup": function(done) { + promtests.deleteComponent(deviceToken, accountId, deviceId, componentId[0]) + .then(() => promtests.deleteComponent(deviceToken, accountId, deviceId, componentId[1])) + .then(() => {done()}) + .catch((err) => { + done(err); + }); + } + }; }; var descriptions = { @@ -672,6 +686,7 @@ var descriptions = { "sendMaxAmountOfSamples": "Send maximal allowed samples per request", "receiveMaxAmountOfSamples": "Receive maximal allowed samples per request", "receiveDataPointsWithSelectedAttributes": "Receiving data points with selected attributes", + "waitForBackendSynchronization": "Waiting maximal tolerable time backend needs to flush so that points are available", "cleanup": "Cleanup components, commands, rules created for subtest" }; From 9b7ed4fd4c745cec21909a620a612267bc490016 Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Wed, 19 Dec 2018 22:27:56 +0100 Subject: [PATCH 48/68] docker-compose.yml: Added OISP_TSDB_PROPERTIES environment variable for opentsdb container --- docker-compose.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/docker-compose.yml b/docker-compose.yml index 1b2cea49..3780bdac 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -281,3 +281,4 @@ services: - OISP_ZOOKEEPER_CONFIG=${OISP_ZOOKEEPER_CONFIG} - OISP_OPENTSDB_CONFIG=${OISP_OPENTSDB_CONFIG} - OISP_HADOOP_PROPERTIES=${OISP_HADOOP_PROPERTIES} + - OISP_TSDB_PROPERTIES=${OISP_TSDB_PROPERTIES} From e6d5582d9152f3f8bedd3eaef09a2723d1e3a655 Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Mon, 31 Dec 2018 16:10:31 +0100 Subject: [PATCH 49/68] E2E-tests/data-sending-tests: Added more subtests for the new data submsission implementation in frontend * Sending partially correct data, i.e. with wrong component IDs. 
Expected behaviour is that correct data is submitted and PartialDataProcessed(404) error is returned * Correcting 0 prefix in test data which is not tolerated with nodejs v8 * Correcting some indentation issues * Added test for Admin sending data and User trying to send data and get rejected * Added test to send data to device not belonging to account (Should get NotAuthorized (401)) * Added tests on checking whether device can send data to wrong device (should get NotAuthorized 401 --- tests/Makefile | 7 +- tests/oisp-tests.js | 25 +- tests/subtests/data-sending-tests.js | 352 ++++++++++++++++++++++----- tests/subtests/promise-wrap.js | 132 +++++++++- 4 files changed, 456 insertions(+), 60 deletions(-) diff --git a/tests/Makefile b/tests/Makefile index 2a6de416..cc9efb24 100644 --- a/tests/Makefile +++ b/tests/Makefile @@ -26,6 +26,10 @@ USERNAME = "oisp@testing.com" PASSWORD = "OispTesting1" ROLE = "admin" +USERNAME2 = "oisp2@testing.com" +PASSWORD2 = "OispTesting2" +ROLE2 = "admin" + ifeq ($(TESTING_PLATFORM), docker) KAFKA_BROKER="localhost:9092" else @@ -58,6 +62,7 @@ ifeq ($(TESTING_PLATFORM), docker) @$(call msg,"Adding a user for testing ..."); @. ../setup-environment.sh && docker exec -i "$${COMPOSE_PROJECT_NAME}_frontend_1" node /app/admin addUser $(USERNAME) $(PASSWORD) $(ROLE) > /dev/null + @. ../setup-environment.sh && docker exec -i "$${COMPOSE_PROJECT_NAME}_frontend_1" node /app/admin addUser $(USERNAME2) $(PASSWORD2) $(ROLE2) > /dev/null else @$(call msg,"Resetting database ..."); @kubectl exec -i $(DASHBOARD_POD) -c dashboard -- node /app/admin resetDB @@ -67,7 +72,7 @@ else endif @$(call msg,"Starting the e2e testing ..."); - @. ../setup-environment.sh && env http_proxy="" https_proxy="" USERNAME=$(USERNAME) PASSWORD=$(PASSWORD) LOG_LEVEL="error" NODE_ENV="local" npm test + @. 
../setup-environment.sh && env http_proxy="" https_proxy="" USERNAME=$(USERNAME) PASSWORD=$(PASSWORD) USERNAME2=$(USERNAME2) PASSWORD2=$(PASSWORD2) LOG_LEVEL="error" NODE_ENV="local" npm test ifeq (email-account,$(firstword $(MAKECMDGOALS))) OUTPUT_FILE := $(wordlist 2,$(words $(MAKECMDGOALS)),$(MAKECMDGOALS)) diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index c0796e95..bcc80878 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -967,11 +967,32 @@ describe("Do data sending subtests ...".bold, it(descriptions.sendMaxAmountOfSamples,function(done) { test.sendMaxAmountOfSamples(done); }).timeout(10000); + it(descriptions.receiveMaxAmountOfSamples,function(done) { + test.receiveMaxAmountOfSamples(done); + }).timeout(10000); + it(descriptions.sendPartiallyWrongData,function(done) { + test.sendPartiallyWrongData(done); + }).timeout(10000); + it(descriptions.sendDataAsAdmin,function(done) { + test.sendDataAsAdmin(done); + }).timeout(10000); + it(descriptions.sendDataAsAdminWithWrongAccount,function(done) { + test.sendDataAsAdminWithWrongAccount(done); + }).timeout(10000); + it(descriptions.sendDataAsUser,function(done) { + test.sendDataAsUser(done); + }).timeout(10000); it(descriptions.waitForBackendSynchronization,function(done) { test.waitForBackendSynchronization(done); }).timeout(10000); - it(descriptions.receiveMaxAmountOfSamples,function(done) { - test.receiveMaxAmountOfSamples(done); + it(descriptions.receivePartiallySentData,function(done) { + test.receivePartiallySentData(done); + }).timeout(10000); + it(descriptions.sendDataAsDeviceToWrongDeviceId,function(done) { + test.sendDataAsDeviceToWrongDeviceId(done); + }).timeout(10000); + it(descriptions.receiveDataFromAdmin,function(done) { + test.receiveDataFromAdmin(done); }).timeout(10000); it(descriptions.cleanup,function(done) { test.cleanup(done); diff --git a/tests/subtests/data-sending-tests.js b/tests/subtests/data-sending-tests.js index d5e94883..c163dad3 100644 --- a/tests/subtests/data-sending-tests.js +++ b/tests/subtests/data-sending-tests.js @@ -26,12 +26,24 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { var componentNames = ["temperature-sensor-sdt", "humidity-sensor-sdt"]; var componentTypes = ["temperature.v1.0", "humidity.v1.0"]; var promtests = require('./promise-wrap'); + var uuidv4 = require('uuid/v4'); var rules = []; var componentId = []; var dataValues1Time; var dataValues2Time; var dataValues3Time; var dataValues4Time; + var dataValues5StartTime; + var dataValues5StopTime; + var dataValues6StartTime; + var dataValues6StopTime; + var accountId2; + var accountName2 = "badAccount"; + var userToken2; + var username2 = process.env.USERNAME2; + var password2 = process.env.PASSWORD2; + var newDeviceId = "d-e-v-i-c-e"; + var newComponentId; const MIN_NUMBER = 0.0001; const MAX_SAMPLES = 1000; const BASE_TIMESTAMP = 1000000000000 @@ -190,7 +202,7 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { component: 0, value: 11, ts: 20000 + BASE_TIMESTAMP, - loc: [9.8765, 432.1, 09.876] + loc: [9.8765, 432.1, 9.876] }], [{ component: 0, @@ -257,6 +269,66 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { }] ]; + var dataValues5 = [ + [{ + component: 2, + value: 10.1, + ts: 1100000000 + BASE_TIMESTAMP + }], + [{ + component: 0, + value: 10, + ts: 1100020000 + BASE_TIMESTAMP + }, + { + component: 2, + value: 12.3, + ts: 1100030000 + BASE_TIMESTAMP + } + ], + [{ + component: 0, + value: 13.4, + ts: 1100040000 + 
BASE_TIMESTAMP + }, + { + component: 0, + value: 20, + ts: 1100050000 + BASE_TIMESTAMP + }, + { + component: 2, + value: 15.6, + ts: 1100060000 + BASE_TIMESTAMP + }] + ]; + + var dataValues6 = [ + { + component: 0, + value: 1, + ts: 1200000000 + BASE_TIMESTAMP + }, + { + component: 0, + value: 2, + ts: 1200020000 + BASE_TIMESTAMP + } + ] + + var dataValues7 = [ + { + component: 0, + value: 101, + ts: 1200000001 + BASE_TIMESTAMP + }, + { + component: 0, + value: 102, + ts: 1200020001 + BASE_TIMESTAMP + } + ] + var aggregation = { MAX: 0, MIN: 1, @@ -265,7 +337,7 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { SUMOFSQUARES: 4 } - var createObjectFromData = function(sample, sampleHeader){ + var createObjectFromData = function(sample, sampleHeader) { var o = {}; sample.forEach(function(element, index) { if (element != "") { @@ -303,21 +375,21 @@ var test = function(userToken, accountId, deviceId, deviceToken, cbManager) { } } -var attrEqual = function(dataValue, element, onlyExistingAttr) { - var result = true; - if (dataValue.attributes !== undefined) { - Object.keys(dataValue.attributes).forEach(function(el) { - if (!onlyExistingAttr && element[el] != dataValue.attributes[el]) { - result = false; - } else { - if (element[el] !== undefined && element[el] != dataValue.attributes[el]){ + var attrEqual = function(dataValue, element, onlyExistingAttr) { + var result = true; + if (dataValue.attributes !== undefined) { + Object.keys(dataValue.attributes).forEach(function(el) { + if (!onlyExistingAttr && element[el] != dataValue.attributes[el]) { result = false; + } else { + if (element[el] !== undefined && element[el] != dataValue.attributes[el]) { + result = false; + } } - } - }) + }) + } + return result; } - return result; -} var comparePoints = function(dataValues, points, onlyExistingAttributes) { var result = true; @@ -350,19 +422,19 @@ var attrEqual = function(dataValue, element, onlyExistingAttr) { return results; } - var calcAggregationsPerComponent = function(flattenedArray){ + var calcAggregationsPerComponent = function(flattenedArray) { return flattenedArray.reduce(function(acc, val) { - if (val.value > acc[val.component][aggregation.MAX]) { - acc[val.component][aggregation.MAX] = val.value; - } - if (val.value < acc[val.component][aggregation.MIN]) { - acc[val.component][aggregation.MIN] = val.value; - } - acc[val.component][aggregation.COUNT]++; - acc[val.component][aggregation.SUM] += val.value; - acc[val.component][aggregation.SUMOFSQUARES] += val.value * val.value; - return acc; + if (val.value > acc[val.component][aggregation.MAX]) { + acc[val.component][aggregation.MAX] = val.value; + } + if (val.value < acc[val.component][aggregation.MIN]) { + acc[val.component][aggregation.MIN] = val.value; + } + acc[val.component][aggregation.COUNT]++; + acc[val.component][aggregation.SUM] += val.value; + acc[val.component][aggregation.SUMOFSQUARES] += val.value * val.value; + return acc; }, [[Number.MIN_VALUE, Number.MAX_VALUE, 0, 0, 0], [Number.MIN_VALUE, Number.MAX_VALUE, 0, 0, 0]]) } @@ -516,7 +588,7 @@ var attrEqual = function(dataValue, element, onlyExistingAttr) { } var resultObjects = result.data[0].components[compIndex].samples.map( (element) => - createObjectFromData(element, result.data[0].components[compIndex].samplesHeader) + createObjectFromData(element, result.data[0].components[compIndex].samplesHeader) ); var comparisonResult = comparePoints(flattenedDataValues, resultObjects); if (comparisonResult !== true) { @@ -539,7 +611,7 @@ var attrEqual = 
function(dataValue, element, onlyExistingAttr) { var resultObjects = result.data[0].components[compIndex].samples.map( (element) => - createObjectFromData(element, result.data[0].components[compIndex].samplesHeader) + createObjectFromData(element, result.data[0].components[compIndex].samplesHeader) ); var comparisonResult = comparePoints(flattenedDataValues, resultObjects, true); if (comparisonResult !== true) { @@ -573,21 +645,21 @@ var attrEqual = function(dataValue, element, onlyExistingAttr) { var aggr2 = calcAggregationsPerComponent(flattenArray(dataValues2)); var aggr3 = calcAggregationsPerComponent(flattenArray(dataValues3)); var aggr4 = calcAggregationsPerComponent(flattenArray(dataValues4)); - var allAggregation = [aggr1, aggr2, aggr3, aggr4].reduce(function(acc, val){ - [0, 1].forEach(function(index){ - if (acc[index][aggregation.MAX] < val[index][aggregation.MAX]) { - acc[index][aggregation.MAX] = val[index][aggregation.MAX]; - } - if (val[index][aggregation.COUNT] - && acc[index][aggregation.MIN] > val[index][aggregation.MIN]) { - acc[index][aggregation.MIN] = val[index][aggregation.MIN]; - } - acc[index][aggregation.COUNT] += val[index][aggregation.COUNT]; - acc[index][aggregation.SUM] += val[index][aggregation.SUM]; - acc[index][aggregation.SUMOFSQUARES] += val[index][aggregation.SUMOFSQUARES]; + var allAggregation = [aggr1, aggr2, aggr3, aggr4].reduce(function(acc, val) { + [0, 1].forEach(function(index) { + if (acc[index][aggregation.MAX] < val[index][aggregation.MAX]) { + acc[index][aggregation.MAX] = val[index][aggregation.MAX]; + } + if (val[index][aggregation.COUNT] + && acc[index][aggregation.MIN] > val[index][aggregation.MIN]) { + acc[index][aggregation.MIN] = val[index][aggregation.MIN]; + } + acc[index][aggregation.COUNT] += val[index][aggregation.COUNT]; + acc[index][aggregation.SUM] += val[index][aggregation.SUM]; + acc[index][aggregation.SUMOFSQUARES] += val[index][aggregation.SUMOFSQUARES]; }); return acc; - },[[Number.MIN_VALUE, Number.MAX_VALUE, 0, 0, 0], [Number.MIN_VALUE, Number.MAX_VALUE, 0, 0, 0]]) + }, [[Number.MIN_VALUE, Number.MAX_VALUE, 0, 0, 0], [Number.MIN_VALUE, Number.MAX_VALUE, 0, 0, 0]]) promtests.searchDataAdvanced(dataValues1Time, -1, deviceToken, accountId, deviceId, componentId, false, undefined, "only", false) .then((result) => { @@ -596,7 +668,7 @@ var attrEqual = function(dataValue, element, onlyExistingAttr) { if (result.data[0].components[0].componentId !== componentId[0]) { mapping = [1, 0]; } - [0, 1].forEach(function(index){ + [0, 1].forEach(function(index) { assert.closeTo(allAggregation[index][aggregation.MAX], result.data[0].components[mapping[index]].max, MIN_NUMBER); assert.closeTo(allAggregation[index][aggregation.MIN], result.data[0].components[mapping[index]].min, MIN_NUMBER); assert.closeTo(allAggregation[index][aggregation.COUNT], result.data[0].components[mapping[index]].count, MIN_NUMBER); @@ -623,10 +695,10 @@ var attrEqual = function(dataValue, element, onlyExistingAttr) { "sendMaxAmountOfSamples": function(done) { var dataList = []; - for (var i = 0; i < MAX_SAMPLES; i++){ + for (var i = 0; i < MAX_SAMPLES; i++) { var ts = (i + 1) * 1000000 + BASE_TIMESTAMP var obj = { - component:0, + component: 0, ts: ts, value: i } @@ -646,29 +718,190 @@ var attrEqual = function(dataValue, element, onlyExistingAttr) { if (result.data[0].components.length != 1) done("Wrong number of point series!"); assert.equal(result.rowCount, MAX_SAMPLES); var samples = result.data[0].components[0].samples; - samples.forEach(function(element, i){ - 
assert.equal(element[1], i); - assert.equal(element[0], (i + 1) * 1000000 + BASE_TIMESTAMP); + samples.forEach(function(element, i) { + assert.equal(element[1], i); + assert.equal(element[0], (i + 1) * 1000000 + BASE_TIMESTAMP); }) done(); }) .catch((err) => { done(err); }); - }, - "waitForBackendSynchronization": function(done) { - setTimeout(done, 2000); + }, + "waitForBackendSynchronization": function(done) { + setTimeout(done, 2000); - }, - "cleanup": function(done) { - promtests.deleteComponent(deviceToken, accountId, deviceId, componentId[0]) + }, + "sendPartiallyWrongData": function(done) { + var proms = []; + var codes = []; + dataValues5StartTime = dataValues5[0][0].ts; + var dataValues5lastElement = dataValues5[dataValues5.length - 1]; + var dataValues5lastLength = dataValues5[dataValues5.length - 1].length; + dataValues5StopTime = dataValues5lastElement[dataValues5lastLength - 1].ts; + componentId.push(uuidv4()); // 3rd id is random + dataValues5.forEach(function(element) { + proms.push(promtests.submitDataList(element, deviceToken, accountId, deviceId, componentId, {})); + }); + Promise.all(proms.map(p => p.catch(e => e))) + .then(results => { + var parsedResults = results.map( (result) => { + //some results are string, some others objects + //Therefore if parsing fails, it must be an object already + try { + return JSON.parse(result); + } catch (e) { + return result; + } + }) + assert.equal(parsedResults[0].code, 1412); + assert.equal(parsedResults[1].code, 6402); + assert.equal(parsedResults[2].code, 6402); + done(); + }) + .catch(err => done(err)); + + }, + "receivePartiallySentData": function(done) { + var listOfExpectedResults = flattenArray(dataValues5) + .filter((elem) => elem.component != 2); + promtests.searchData(dataValues5StartTime, dataValues5StopTime, deviceToken, accountId, deviceId, componentId[0], false, {}) + .then((result) => { + if (result.series.length != 1) done("Wrong number of point series!"); + var comparisonResult = comparePoints(listOfExpectedResults, result.series[0].points); + if (comparisonResult === true) { + done(); + } else { + done(comparisonResult); + } + }) + .catch((err) => { + done(err); + }); + }, + "sendDataAsAdmin": function(done) { + dataValues6StartTime = dataValues6[0].ts; + dataValues6StopTime = dataValues6[1].ts; + var username = process.env.USERNAME; + var password = process.env.PASSWORD; + assert.isNotEmpty(username, "no username provided"); + assert.isNotEmpty(password, "no password provided"); + promtests.authGetToken(username, password) + .then((userToken) => promtests.submitDataList(dataValues6, userToken, accountId, deviceId, componentId, {})) + .then(() => { + done() + }) + .catch((err) => { + done(new Error(err)); + }) + }, + "sendDataAsUser": function(done) { + var username = process.env.USERNAME; + var password = process.env.PASSWORD; + var username2 = process.env.USERNAME2; + var password2 = process.env.PASSWORD2; + var admin2Token; + var inviteId; + assert.isNotEmpty(username, "no username provided"); + assert.isNotEmpty(password, "no password provided"); + assert.isNotEmpty(username2, "no username provided"); + assert.isNotEmpty(password2, "no password provided"); + //First create a user for the account, accept the invitation and try to send data + promtests.authGetToken(username, password) + .then((userToken) => {return promtests.createInvitation(userToken, accountId, username2)}) + .then((result) => {inviteId = result._id; return promtests.authGetToken(username2, password2)}) + .then((userToken) => {admin2Token = 
userToken; return promtests.acceptInvitation(userToken, accountId, inviteId)}) + .then(() => promtests.submitDataList(dataValues7, + admin2Token, accountId, deviceId, componentId, {}).catch(e => e)) + .then((result) => { + var parsedResult = JSON.parse(result); + assert.equal(parsedResult.code, 401) + done() + }) + .catch((err) => { + done(err); + }) + }, + "sendDataAsAdminWithWrongAccount": function(done) { + assert.isNotEmpty(username2, "no username provided"); + assert.isNotEmpty(password2, "no password provided"); + promtests.authGetToken(username2, password2) + .then((userToken) => { + userToken2 = userToken; + return promtests.createAccount(accountName2, userToken) + }) + .then(() => promtests.authTokenInfo(userToken2)) + .then((tokenInfo) => { + accountId2 = tokenInfo.payload.accounts[0].id; + return accountId2;}) + .then((accountId2) => + promtests.submitDataList(dataValues7, + userToken2, accountId2, deviceId, componentId, {}).catch(e => e)) + .then((result) => { + var parsedResult = JSON.parse(result); + assert.equal(parsedResult.code, 401) + done() + }) + .catch((err) => { + done(err); + }) + }, + "receiveDataFromAdmin": function(done) { + var listOfExpectedResults = dataValues6; + promtests.searchData(dataValues6StartTime, dataValues6StopTime, deviceToken, accountId, deviceId, componentId[0], false, {}) + .then((result) => { + if (result.series.length != 1) done("Wrong number of point series!"); + var comparisonResult = comparePoints(listOfExpectedResults, result.series[0].points); + if (comparisonResult === true) { + done(); + } else { + done(comparisonResult); + } + }) + .catch((err) => { + done(err); + }); + }, + "sendDataAsDeviceToWrongDeviceId": function(done) { + var username = process.env.USERNAME; + var password = process.env.PASSWORD; + var newDeviceName = "innocentDevice"; + var componentName = "evilDeviceComponent"; + var componentType = componentTypes[0]; + assert.isNotEmpty(username, "no username provided"); + assert.isNotEmpty(password, "no password provided"); + //First create a user for the account, accept the invitation and try to send data + promtests.authGetToken(username, password) + .then((userToken) => promtests.createDevice(newDeviceName, newDeviceId, userToken, accountId)) + .then(() => promtests.activateDevice(userToken, accountId, newDeviceId)) + .then((result) => { + return promtests.addComponent(componentName, componentType, userToken, accountId, newDeviceId) + }) + .then((id) => {newComponentId = id;}) + .then(() => promtests.submitDataList(dataValues7, + deviceToken, accountId, newDeviceId, newComponentId, {}).catch(e => e)) + .then((result) => { + var parsedResult = JSON.parse(result); + assert.equal(parsedResult.code, 401) + done() + }) + .catch((err) => { + done(err); + }) + }, + "cleanup": function(done) { + promtests.deleteComponent(deviceToken, accountId, deviceId, componentId[0]) .then(() => promtests.deleteComponent(deviceToken, accountId, deviceId, componentId[1])) - .then(() => {done()}) + .then(() => promtests.deleteComponent(userToken, accountId, newDeviceId, newComponentId)) + .then(() => promtests.deleteDevice(userToken, accountId, newDeviceId)) + .then(() => promtests.deleteAccount(userToken2, accountId2)) + .then(() => promtests.deleteInvite(userToken, accountId, username2)) + .then(() => { done() }) .catch((err) => { done(err); }); - } - }; + } + }; }; var descriptions = { @@ -687,10 +920,17 @@ var descriptions = { "receiveMaxAmountOfSamples": "Receive maximal allowed samples per request", "receiveDataPointsWithSelectedAttributes": 
"Receiving data points with selected attributes", "waitForBackendSynchronization": "Waiting maximal tolerable time backend needs to flush so that points are available", + "sendPartiallyWrongData": "Send data with partially unknown cid's", + "receivePartiallySentData": "Recieve the submitted data of the partially wrong data", + "sendDataAsAdmin": "Send test data as admin on behalf of a device", + "sendDataAsUser": "Send test data with user role and get rejected", + "sendDataAsAdminWithWrongAccount": "Send test data as admin with wrong accountId", + "receiveDataFromAdmin": "Test whether data sent from admin earlier has been stored", + "sendDataAsDeviceToWrongDeviceId": "Test whether Device data submission is rejected if it goes to wrong device", "cleanup": "Cleanup components, commands, rules created for subtest" }; module.exports = { test: test, descriptions: descriptions -}; \ No newline at end of file +}; diff --git a/tests/subtests/promise-wrap.js b/tests/subtests/promise-wrap.js index 14489c4b..ac951386 100644 --- a/tests/subtests/promise-wrap.js +++ b/tests/subtests/promise-wrap.js @@ -247,6 +247,126 @@ var searchDataAdvanced = function(from, to, userToken, accountId, deviceId, cidL }); } +var authGetToken = (username, password) => { + return new Promise(function(resolve, reject){ + helpers.auth.login(username, password, function(err, token) { + if (err) { + reject(err); + } else { + resolve(token); + } + }); + }); +}; + +var invitationCreate = (userToken, accountId, receiverEmail) => { + return new Promise(function(resolve, reject){ + helpers.invitation.createInvitation(userToken, accountId, receiverEmail, function(err, response) { + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }); +}; + +var invitationAccept = (userToken, accountId, inviteId) => { + return new Promise(function(resolve, reject){ + helpers.invitation.acceptInvitation(userToken, accountId, inviteId, function(err, response) { + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }); +}; + +var authTokenInfo = (userToken) => { + return new Promise(function(resolve, reject){ + helpers.auth.tokenInfo(userToken, function(err, response) { + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }); +}; + +var accountCreate = (name, userToken) => { + return new Promise(function(resolve, reject){ + helpers.accounts.createAccount(name, userToken, function(err, response) { + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }); +}; + +var accountDelete = (userToken, accountId) => { + return new Promise(function(resolve, reject){ + helpers.accounts.deleteAccount(userToken, accountId, function(err, response) { + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }); +}; + +var inviteDelete = (userToken, accountId, email) => { + return new Promise(function(resolve, reject){ + helpers.invitation.deleteInvitations(userToken, accountId,email, function(err, response) { + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }); +}; + +var createDevice = (name, deviceId, userToken, accountId) => { + return new Promise(function(resolve, reject){ + helpers.devices.createDevice(name, deviceId, userToken, accountId, function(err, response) { + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }); +}; + +var deleteDevice = (userToken, accountId, deviceId) => { + return new Promise(function(resolve, reject){ + helpers.devices.deleteDevice(userToken, accountId, deviceId, function(err, response) { + if (err) { + 
reject(err); + } else { + resolve(response); + } + }); + }); +}; + +var activateDevice = (userToken, accountId, deviceId) => { + return new Promise(function(resolve, reject){ + helpers.devices.activateDevice(userToken, accountId, deviceId, function(err, response) { + if (err) { + reject(err); + } else { + resolve(response); + } + }); + }); +}; + module.exports = { checkObservations: checkObservations, addComponent: addComponent, @@ -259,5 +379,15 @@ module.exports = { submitDataList: submitDataList, submitData: submitData, searchData: searchData, - searchDataAdvanced: searchDataAdvanced + searchDataAdvanced: searchDataAdvanced, + authGetToken: authGetToken, + createInvitation: invitationCreate, + acceptInvitation: invitationAccept, + authTokenInfo: authTokenInfo, + createAccount: accountCreate, + deleteAccount: accountDelete, + deleteInvite: inviteDelete, + createDevice: createDevice, + deleteDevice: deleteDevice, + activateDevice: activateDevice }; From 83c7819bba2876ff8cd13df4b3042ec395adcca8 Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Tue, 8 Jan 2019 10:29:18 +0100 Subject: [PATCH 50/68] E2E-tests/data-sending-tests: Update user creation for k8s e6d5582d9 introduces a second user, which must be created when testing with k8s as well Signed-off-by: Ali Rasim Kocal --- tests/Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/Makefile b/tests/Makefile index cc9efb24..0669b0e2 100644 --- a/tests/Makefile +++ b/tests/Makefile @@ -69,6 +69,7 @@ else @$(call msg,"Adding a user for testing ..."); @kubectl exec -i $(DASHBOARD_POD) -c dashboard -- node /app/admin addUser $(USERNAME) $(PASSWORD) $(ROLE) > /dev/null + @kubectl exec -i $(DASHBOARD_POD) -c dashboard -- node /app/admin addUser $(USERNAME2) $(PASSWORD2) $(ROLE2) > /dev/null endif @$(call msg,"Starting the e2e testing ..."); From 7ada8acebcb31b3c49dfd5ccf32ef8af78917c7d Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Tue, 8 Jan 2019 14:39:26 +0100 Subject: [PATCH 51/68] docker-compose.yml: Move Gearpump to new config system Signed-off-by: Ali Rasim Kocal --- docker-compose.yml | 13 ++- setup-environment.example.sh | 187 ++---------------------------- tests/oisp-tests.js | 7 +- tests/test-config-docker.json | 3 +- tests/test-config-kubernetes.json | 3 +- 5 files changed, 24 insertions(+), 189 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 3780bdac..5cfc1538 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -113,11 +113,11 @@ services: - frontend working_dir: /app environment: - - VCAP_SERVICES=${VCAP_SERVICES} - - VCAP_APPLICATION=${VCAP_APPLICATION} - - KAFKA=${KAFKA} - - KAFKA_HEARTBEAT_TOPIC=${KAFKA_HEARTBEAT_TOPIC} - - GEARPUMP=${GEARPUMP} + - OISP_RULEENGINE_CONFIG=${OISP_RULEENGINE_CONFIG} + - OISP_HBASE_CONFIG=${OISP_HBASE_CONFIG} + - OISP_HADOOP_PROPERTIES=${OISP_HADOOP_PROPERTIES} + - OISP_ZOOKEEPER_CONFIG=${OISP_ZOOKEEPER_CONFIG} + - OISP_KAFKA_CONFIG=${OISP_KAFKA_CONFIG} ports: - 8090:8090 command: bash wait-for-it.sh kafka:9092 -t 300 -- bash bootstrap.sh @@ -192,6 +192,9 @@ services: - OISP_WEBSOCKETUSER_CONFIG=${OISP_WEBSOCKETUSER_CONFIG} - OISP_RULEENGINE_CONFIG=${OISP_RULEENGINE_CONFIG} - OISP_MAIL_CONFIG=${OISP_MAIL_CONFIG} + - OISP_HBASE_CONFIG=${OISP_HBASE_CONFIG} + - OISP_HADOOP_PROPERTIES=${OISP_HADOOP_PROPERTIES} + - OISP_ZOOKEEPER_CONFIG=${OISP_ZOOKEEPER_CONFIG} - TEST=${TEST} - NODE_ENV=local backend: diff --git a/setup-environment.example.sh b/setup-environment.example.sh index 5369e8ed..275abe16 100755 --- a/setup-environment.example.sh +++ 
b/setup-environment.example.sh @@ -37,6 +37,7 @@ export KAFKA_PORT='9092' export KAFKA_HEARTBEAT_TOPIC='heartbeat' export GEARPUMP='gearpump:8090' export BACKEND='backend:8080' +export FRONTEND='frontend:4001' export NGINX='nginx' export REDIS='redis' export REDIS_PORT='6379' @@ -57,183 +58,6 @@ OPENTSDB_PROPERTIES='{ "port": "'$OPENTSDB_PORT'" }' - -#################### VCAP is deprecated, will be removed in next version -export VCAP_SERVICES='{ -"postgresql93": [{ - "credentials": { - "dbname": "'$POSTGRES_DB'", - "hostname": "'$POSTGRES'", - "password": "'$POSTGRES_PASSWORD'", - "port": "'$POSTGRES_PORT'", - "username": "'$POSTGRES_USERNAME'" - }, - "name": "mypostgres" -}], - "websocket-server": [{ - "server-name": "websocket-server", - "name": "mywsserver" -}], -"hbase": [{ - "credentials": { - "HADOOP_CONFIG_KEY": { - "hadoop.security.authentication": "simple", - "hadoop.security.authorization": "false", - "hbase.security.authentication": "simple", - "ha.zookeeper.quorum": "'$ZOOKEEPER_HBASE'", - "hbase.zookeeper.property.clientPort": "'$ZOOKEEPER_HBASE_PORT'", - "hbase.zookeeper.quorum": "'$ZOOKEEPER_HBASE'" - } - }, - "name": "myhbase" -}], -"hdfs": [{ - "credentials": { - "HADOOP_CONFIG_KEY": { - "ha.zookeeper.quorum": "'${ZOOKEEPER_HBASE}':'${ZOOKEEPER_HBASE_PORT}'", - "hadoop.security.authorization": "false" - } - }, - "name": "myhdfs" -}], -"kafka": [{ - "credentials": { - "uri": "'$KAFKA'" - }, - "name": "mykafka" -}], -"zookeeper": [{ - "credentials": { - "zk.cluster": "'${ZOOKEEPER_KAFKA}:${ZOOKEEPER_KAFKA_PORT}'", - "zk.node": "/tmp" - }, - "label": "zookeeper", - "name": "myzookeeper", - "plan": "local", - "tags": [] -}], -"gearpump": [{ - "credentials": { - "username": "admin", - "password": "admin", - "dashboardUrl": "'$GEARPUMP'" - }, - "name": "mygearpump" -}], -"smtp": [ - { - "credentials": { - "host": "'$SMTP_HOST'", - "port": "'$SMTP_PORT'", - "protocol": "smtp", - "username": "'$SMTP_USERNAME'", - "password": "'$SMTP_PASSWORD'" - }, - "label": "smtp", - "name": "mysmtp" - } - ], -"redis": [ - { - "credentials": { - "hostname": "'$REDIS'", - "port": "'$REDIS_PORT'", - "password": "" - }, - "label": "redis", - "name": "myredis" - } - ], -"user-provided": [{ - "credentials": { - "kdc": "localhost", - "kpassword": "pass", - "krealm": "realm", - "kuser": "user" - }, - "name": "kerberos-service" -}, -{ - "credentials": { - "hosts": "'$KAFKA'", - "enabled": true, - "partitions": 1, - "replication": 1, - "timeout_ms": 10000, - "topics": { - "observations": "metrics", - "rule_engine": "rules-update", - "heartbeat": { - "name": "'$KAFKA_HEARTBEAT_TOPIC'", - "interval": 5000 - } - } - }, - "name": "kafka-ups" -}, -{ - "credentials": { - "host": "http://'$NGINX'", - "strictSSL": false - }, - "name": "dashboard-endpoint-ups" -}, -{ - "credentials": { - "captcha_test_code": "s09ef48213s8fe8fw8rwer5489wr8wd5", - "interaction_token_permision_key": "password", - "private_pem_path": "./keys/private.pem", - "public_pem_path": "./keys/public.pem" - }, - "name": "dashboard-security-ups" -}, -{ - "credentials": { - "deviceMeasurementTableName": "LOCAL-AA-BACKEND_DEVICE_MEASUREMENT", - "host": "http://'$BACKEND'" - - }, - "name": "backend-ups" -}, -{ - "credentials": { - "sender": "test.sender@streammyiot.com" - }, - "name": "mail-ups" -}, -{ - "credentials": { - "password": "7d501829lhbl1or0bb1784462c97bcad6", - "username": "gateway@intel.com" - - }, - "name": "gateway-credentials-ups" -}, -{ - "credentials": { - "password": "7d501829lhbl1or0bb1784462c97bcad6", - "username": 
"rule_engine@intel.com" - - }, - "name": "rule-engine-credentials-ups" -}, -{ - "credentials": { - "password": "AgjY7H3eXztyA6AmNjI2rte446gdttgattwRRF61", - "username": "api_actuator" - }, - "name": "websocket-ups" -}] -}' - -export VCAP_APPLICATION='{ - "space_name": "local" -}' -######################################### - -#VCAP variables are left overs from Cloud Foundry times. -#To allow a transition to Kubernetes, we will define both, VCAP and regular environment variables. -#The following is how the configuration looks in future #The root variable for every service looks like OISP_SERVICE_CONFIG #It contains a JSON object, starting with single quote, hash names with double quotes, environmental variables with double, then single quotes, e.g. "'$VAR'" #References to other environmental variables which can be parsed as OBJECTS are done with "@@OISP_*_CONFIG" @@ -376,8 +200,15 @@ export OISP_MAIL_CONFIG=\ export OISP_RULEENGINE_CONFIG=\ '{ + "uri": "gearpump:8090", "password": "7d501829lhbl1or0bb1784462c97bcad6", - "username": "rule_engine@intel.com" + "username": "rule_engine@intel.com", + "gearpumpUsername": "admin", + "gearpumpPassword": "admin", + "frontendUri": "'$FRONTEND'", + "hbaseConfig": "@@OISP_HBASE_CONFIG", + "kafkaConfig": "@@OISP_KAFKA_CONFIG", + "zookeeperConfig": "@@OISP_ZOOKEEPER_CONFIG" }' export OISP_GATEWAY_CONFIG=\ diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index bcc80878..75ead636 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -191,10 +191,9 @@ describe("Waiting for OISP services to be ready ...\n".bold, function() { done(); }); - it('Shall wait for oisp services to start', function(done) { - var kafkaConsumer; - var kafka_credentials = cfenvReader.getServiceCredentials("kafka-ups"); - var topic = kafka_credentials.topics.heartbeat.name; + it('Shall wait for oisp services to start', function(done) { + var kafkaConsumer; + var topic = config["connector"]["kafka"]["topic"]; var partition = 0; var kafkaAddress = config["connector"]["kafka"]["host"] + ":" + config["connector"]["kafka"]["port"]; var kafkaClient = new kafka.KafkaClient({kafkaHost: kafkaAddress}); diff --git a/tests/test-config-docker.json b/tests/test-config-docker.json index 41c7e7a3..590c0b8e 100644 --- a/tests/test-config-docker.json +++ b/tests/test-config-docker.json @@ -33,7 +33,8 @@ }, "kafka": { "host": "localhost", - "port": 9092 + "port": 9092, + "topic": "heartbeat" } } } diff --git a/tests/test-config-kubernetes.json b/tests/test-config-kubernetes.json index 5063d828..c25d711d 100644 --- a/tests/test-config-kubernetes.json +++ b/tests/test-config-kubernetes.json @@ -33,7 +33,8 @@ }, "kafka": { "host": "kafka", - "port": 9092 + "port": 9092, + "topic": "heartbeat" } } } From 3234732d56bb49d3abccab474beacb09868794cc Mon Sep 17 00:00:00 2001 From: Oguzcan Kirmemis Date: Thu, 24 Jan 2019 21:50:43 +0300 Subject: [PATCH 52/68] Add jaegerTracing environment variable to OISP_FRONTEND_CONFIG variable, disable jaeger by default Signed-off-by: Oguzcan Kirmemis --- setup-environment.example.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup-environment.example.sh b/setup-environment.example.sh index 275abe16..fb4da33b 100755 --- a/setup-environment.example.sh +++ b/setup-environment.example.sh @@ -91,7 +91,8 @@ export OISP_FRONTEND_CONFIG=\ "websocketUserConfig": "@@OISP_WEBSOCKETUSER_CONFIG", "mailConfig": "@@OISP_MAIL_CONFIG", "ruleEngineConfig": "@@OISP_RULEENGINE_CONFIG", - "gatewayConfig": "@@OISP_GATEWAY_CONFIG" + "gatewayConfig": 
"@@OISP_GATEWAY_CONFIG", + "jaegerTracing": false }' export OISP_WEBSOCKET_SERVER_CONFIG=\ From 0e3e238f9296a29dcbeba197ae32f1008ce595ee Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Tue, 22 Jan 2019 22:52:17 +0100 Subject: [PATCH 53/68] MQTT integration: * Added oisp-mqtt-gw submodule, * Adapted to OISP config structure and added to setup-environment.sh * Added broker and gateway to main docker-compose * Added cafile, certfile, etc to broker config. --- .gitmodules | 5 ++++- Makefile | 6 ++++++ docker-compose.yml | 30 ++++++++++++++++++++++++++++++ oisp-mqtt-gw | 1 + setup-environment.example.sh | 36 ++++++++++++++++++++++++++++++++++++ 5 files changed, 77 insertions(+), 1 deletion(-) create mode 160000 oisp-mqtt-gw diff --git a/.gitmodules b/.gitmodules index dff03c60..e6615884 100644 --- a/.gitmodules +++ b/.gitmodules @@ -9,4 +9,7 @@ url = https://github.com/Open-IoT-Service-Platform/oisp-gearpump-rule-engine.git [submodule "oisp-websocket-server"] path = oisp-websocket-server - url = https://github.com/Open-IoT-Service-Platform/oisp-websocket-server.git \ No newline at end of file + url = https://github.com/Open-IoT-Service-Platform/oisp-websocket-server.git +[submodule "oisp-mqtt-gw"] + path = oisp-mqtt-gw + url = https://github.com/Open-IoT-Service-Platform/oisp-mqtt-gw.git diff --git a/Makefile b/Makefile index 3d888e28..5d3a4b8a 100644 --- a/Makefile +++ b/Makefile @@ -124,6 +124,12 @@ endif mkdir -p ${SSL_CERT_PATH}; \ openssl req -nodes -new -x509 -keyout ${SSL_CERT_PATH}/server.key -out ${SSL_CERT_PATH}/server.cert -subj "/C=UK/ST=NRW/L=London/O=My Inc/OU=DevOps/CN=www.streammyiot.com/emailAddress=donotreply@www.streammyiot.com"; \ fi; + @if [ -f data/keys/mqtt/mqtt_gw_secret.key ]; then echo "MQTT/GW key existing already. Skipping creating new key"; else \ + echo "Creating MQTT/GW secret."; \ + mkdir -p data/keys/mqtt; \ + openssl rand -base64 16 > data/keys/mqtt/mqtt_gw_secret.key; \ + fi; + @touch $@ ## build: Build OISP images locally. 
diff --git a/docker-compose.yml b/docker-compose.yml index 5cfc1538..06bfb3d9 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -285,3 +285,33 @@ services: - OISP_OPENTSDB_CONFIG=${OISP_OPENTSDB_CONFIG} - OISP_HADOOP_PROPERTIES=${OISP_HADOOP_PROPERTIES} - OISP_TSDB_PROPERTIES=${OISP_TSDB_PROPERTIES} + mqtt-gateway: + build: + context: ./oisp-mqtt-gw + dockerfile: Dockerfile-gateway + restart: always + depends_on: + - mqtt-broker + working_dir: /app + environment: + - NODE_ENV=local + - OISP_MQTT_GATEWAY_CONFIG + - OISP_REDIS_CONFIG + volumes: + - ./data/keys:/app/keys + - ./oisp-mqtt-gw/ingestion:/app/ingestion + - ./oisp-mqtt-gw/lib:/app/lib + command: ./wait-for-it.sh broker:8883 -t 300 -- ./wait-for-it.sh redis:6379 -t 300 + mqtt-broker: + build: + context: ./oisp-mqtt-gw + dockerfile: Dockerfile-broker + restart: always + ports: + - 8883:8883 + working_dir: /app + environment: + - OISP_MQTT_BROKER_CONFIG + - OISP_REDIS_CONFIG + volumes: + - ./data/keys:/app/keys diff --git a/oisp-mqtt-gw b/oisp-mqtt-gw new file mode 160000 index 00000000..cdfe9757 --- /dev/null +++ b/oisp-mqtt-gw @@ -0,0 +1 @@ +Subproject commit cdfe9757a867de47d213c281ae8dcfe28816071c diff --git a/setup-environment.example.sh b/setup-environment.example.sh index fb4da33b..f28990b5 100755 --- a/setup-environment.example.sh +++ b/setup-environment.example.sh @@ -39,12 +39,21 @@ export GEARPUMP='gearpump:8090' export BACKEND='backend:8080' export FRONTEND='frontend:4001' export NGINX='nginx' +NGINX_PORT='443' export REDIS='redis' export REDIS_PORT='6379' OPENTSDB_URI='opentsdb' OPENTSDB_PORT=4242 +MQTT_BROKER_URI='mqtt-broker' +MQTT_BROKER_PORT=8883 +MQTT_BROKER_USERNAME='admin' +MQTT_BROKER_PASSWORD='8dhh1f2471' + +FRONTEND_SYSTEMUSER="gateway@intel.com" +FRONTEND_SYSTEMPASSWORD="7d501829lhbl1or0bb1784462c97bcad6" + export SMTP_HOST="${SMTP_HOST:-auth.smtp.1and1.co.uk}" export SMTP_PORT="${SMTP_PORT:-587}" export SMTP_USERNAME="${SMTP_USERNAME:-test.sender@streammyiot.com}" @@ -223,3 +232,30 @@ export OISP_WEBSOCKETUSER_CONFIG=\ "password": "AgjY7H3eXztyA6AmNjI2rte446gdttgattwRRF61", "username": "api_actuator" }' + +export OISP_MQTT_GATEWAY_CONFIG=\ +'{ + "mqttBrokerUrl": "'${MQTT_BROKER_URI}'", + "mqttBrokerPort": "'${MQTT_BROKER_PORT}'", + "mqttBrokerUsername": "'${MQTT_BROKER_USERNAME}'", + "mqttBrokerPassword": "'${MQTT_BROKER_PASSWORD}'", + "frontendUri": "'${NGINX}'", + "frontendPort": "'${NGINX_PORT}'", + "frontendSystemUser": "'${FRONTEND_SYSTEMUSER}'", + "frontendSystemPassword": "'${FRONTEND_SYSTEMPASSWORD}'", + "redisConf": "@@OISP_REDIS_CONFIG", + "aesKey": "/app/keys/mqtt/mqtt_gw_secret.key" +}' + +export OISP_MQTT_BROKER_CONFIG=\ +'{ + "redisConf": "@@OISP_REDIS_CONFIG", + "jwtPubKey": "/app/keys/public.pem", + "mqttBrokerUserName": "'${MQTT_BROKER_USERNAME}'", + "mqttBrokerPassword": "'${MQTT_BROKER_PASSWORD}'", + "mqttBrokerPort": "'${MQTT_BROKER_PORT}'", + "aesKey": "/app/keys/mqtt/mqtt_gw_secret.key", + "cafile": "/app/keys/ssl/server.cert", + "keyfile": "/app/keys/ssl/server.key", + "certfile": "/app/keys/ssl/server.cert" + }' From 37e23e0c55d1dfea99c42ca12200d09126d11bf3 Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Mon, 28 Jan 2019 23:36:56 +0100 Subject: [PATCH 54/68] Added backend config for jaeger Signed-off-by: Marcel Wagner --- setup-environment.example.sh | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/setup-environment.example.sh b/setup-environment.example.sh index f28990b5..01606585 100755 --- a/setup-environment.example.sh +++ 
b/setup-environment.example.sh @@ -71,6 +71,16 @@ OPENTSDB_PROPERTIES='{ #It contains a JSON object, starting with single quote, hash names with double quotes, environmental variables with double, then single quotes, e.g. "'$VAR'" #References to other environmental variables which can be parsed as OBJECTS are done with "@@OISP_*_CONFIG" #References to other environmental variables which can be parsed as MAPS (e.g. Property Maps) are done with "%%OISP_*_PROPERTIES". MAPS contain only pairs. +export OISP_BACKEND_JAEGER_CONFIG=\ +'{ + "serviceName": "backend", + "agentHost": "jaeger", + "agentPort": 6832, + "logSpans": true, + "samplerType": "probabilistic", + "samplerParam": 0.1, + "tracing": false +}' export OISP_TSDB_PROPERTIES='{}' #=${OPENTSDB_PROPERTIES} for openTSDB @@ -86,7 +96,8 @@ export OISP_BACKEND_CONFIG=\ "kafkaConfig": "@@OISP_KAFKA_CONFIG", "zookeeperConfig": "@@OISP_ZOOKEEPER_CONFIG", "kerberosConfig": "@@OISP_KERBEROS_CONFIG", - "hbaseConfig": "@@OISP_HBASE_CONFIG" + "hbaseConfig": "@@OISP_HBASE_CONFIG", + "jaegerConfig": "@@OISP_BACKEND_JAEGER_CONFIG" }' export OISP_FRONTEND_CONFIG=\ From 75c8f45068e77f50aedf29043ee87c96a3015e3f Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Sun, 3 Feb 2019 18:30:48 +0100 Subject: [PATCH 55/68] docker-compose.yml: Added OISP_BACKEND_JAEGER_CONFIG variable for backend Signed-off-by: Marcel Wagner --- docker-compose.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/docker-compose.yml b/docker-compose.yml index 06bfb3d9..24886874 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -219,6 +219,7 @@ services: - OISP_HBASE_CONFIG=${OISP_HBASE_CONFIG} - OISP_HADOOP_PROPERTIES=${OISP_HADOOP_PROPERTIES} - OISP_TSDB_PROPERTIES=${OISP_TSDB_PROPERTIES} + - OISP_BACKEND_JAEGER_CONFIG hdfs-namenode: image: singularities/hadoop command: start-hadoop namenode From 3df85d114afab6fb1a463fb2e3675182a2ba06c6 Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Wed, 6 Feb 2019 06:56:19 +0000 Subject: [PATCH 56/68] setup-environment.example.sh: Remove unnecessary exports This should close #120 Signed-off-by: Ali Rasim Kocal --- setup-environment.example.sh | 36 +++++++++++++++++++----------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/setup-environment.example.sh b/setup-environment.example.sh index 01606585..a4a11770 100755 --- a/setup-environment.example.sh +++ b/setup-environment.example.sh @@ -15,33 +15,36 @@ # limitations under the License. export DOCKER_TAG=${DOCKER_TAG:-'latest'} - export ZOOKEEPER_KAFKA='zookeeper' export ZOOKEEPER_KAFKA_PORT='2181' +# This needs to be exported because hbase uses a standard container +# which won't parse a JSON config export ZOOKEEPER_HBASE='zookeeper' -export ZOOKEEPER_HBASE_PORT='2181' +ZOOKEEPER_HBASE_PORT='2181' HBASE_ROOTDIR='/hbase' -export POSTGRES='postgres' +POSTGRES='postgres' export POSTGRES_DB_REGULAR="iot" export POSTGRES_DB=${POSTGRES_DB_REGULAR} # postgres needs always regular DB name for initialization. It automatically initiates the test DB as well. if [ "$TEST" = "1" ]; then export POSTGRES_DB='test' echo "Warning: Test database selected." 
fi -export POSTGRES_PORT='5432' +POSTGRES_PORT='5432' export POSTGRES_USERNAME='postgres' export POSTGRES_PASSWORD='intel123' + +# These needs to be exported as test scripts also read from Kafka export KAFKA='kafka:9092' export KAFKA_PORT='9092' export KAFKA_HEARTBEAT_TOPIC='heartbeat' -export GEARPUMP='gearpump:8090' -export BACKEND='backend:8080' -export FRONTEND='frontend:4001' -export NGINX='nginx' +GEARPUMP='gearpump:8090' +BACKEND='backend:8080' +FRONTEND='frontend:4001' +NGINX='nginx' NGINX_PORT='443' -export REDIS='redis' -export REDIS_PORT='6379' +REDIS='redis' +REDIS_PORT='6379' OPENTSDB_URI='opentsdb' OPENTSDB_PORT=4242 @@ -54,19 +57,18 @@ MQTT_BROKER_PASSWORD='8dhh1f2471' FRONTEND_SYSTEMUSER="gateway@intel.com" FRONTEND_SYSTEMPASSWORD="7d501829lhbl1or0bb1784462c97bcad6" -export SMTP_HOST="${SMTP_HOST:-auth.smtp.1and1.co.uk}" -export SMTP_PORT="${SMTP_PORT:-587}" -export SMTP_USERNAME="${SMTP_USERNAME:-test.sender@streammyiot.com}" -export SMTP_PASSWORD="${SMTP_PASSWORD:-xxxxx}" - -export COMPOSE_PROJECT_NAME="oisp" - +SMTP_HOST="${SMTP_HOST:-auth.smtp.1and1.co.uk}" +SMTP_PORT="${SMTP_PORT:-587}" +SMTP_USERNAME="${SMTP_USERNAME:-test.sender@streammyiot.com}" +SMTP_PASSWORD="${SMTP_PASSWORD:-xxxxx}" OPENTSDB_PROPERTIES='{ "uri": "'$OPENTSDB_URI'", "port": "'$OPENTSDB_PORT'" }' +export COMPOSE_PROJECT_NAME="oisp" + #The root variable for every service looks like OISP_SERVICE_CONFIG #It contains a JSON object, starting with single quote, hash names with double quotes, environmental variables with double, then single quotes, e.g. "'$VAR'" #References to other environmental variables which can be parsed as OBJECTS are done with "@@OISP_*_CONFIG" From 1b499b1bea13145750a2423334bb6fd422d1c856 Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Mon, 18 Feb 2019 14:09:13 +0100 Subject: [PATCH 57/68] debugger: Include apache2-utils and ipython Signed-off-by: Ali Rasim Kocal --- docker-debugger/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-debugger/Dockerfile b/docker-debugger/Dockerfile index 892d099d..c8aec858 100644 --- a/docker-debugger/Dockerfile +++ b/docker-debugger/Dockerfile @@ -1,6 +1,6 @@ FROM ubuntu:18.04 -RUN apt-get update && apt-get -y install nano make git kafkacat python python-pip python3-pip python3-dev python3-setuptools python-setuptools build-essential nodejs dnsutils virtualenv snapd npm wget apt-transport-https curl +RUN apt-get update && apt-get -y install nano make git kafkacat python python-pip python3-pip python3-dev python3-setuptools ipython3 python-setuptools build-essential nodejs dnsutils virtualenv snapd npm wget apt-transport-https curl apache2-utils # install kubectl RUN curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - From ff514f2a7bd564ae312f122a92287a3ea88c7195 Mon Sep 17 00:00:00 2001 From: Ali Rasim Kocal Date: Thu, 21 Feb 2019 11:00:19 +0100 Subject: [PATCH 58/68] debugger: Add load test creator util. 
Signed-off-by: Ali Rasim Kocal --- docker-debugger/Dockerfile | 3 + docker-debugger/load-tests/create_test.py | 74 +++++++++++++++++++++++ 2 files changed, 77 insertions(+) create mode 100644 docker-debugger/load-tests/create_test.py diff --git a/docker-debugger/Dockerfile b/docker-debugger/Dockerfile index c8aec858..57737546 100644 --- a/docker-debugger/Dockerfile +++ b/docker-debugger/Dockerfile @@ -21,6 +21,9 @@ RUN wget https://github.com/tsenart/vegeta/releases/download/cli%2Fv12.1.0/veget tar -xzvf vegeta-12.1.0-linux-amd64.tar.gz && \ cp vegeta /usr/bin/vegeta +RUN mkdir /home/load-tests/ +ADD load-tests/create_test.py /home/load-tests/create_test.py + WORKDIR /home/platform-launcher EXPOSE 8089 5557 5558 diff --git a/docker-debugger/load-tests/create_test.py b/docker-debugger/load-tests/create_test.py new file mode 100644 index 00000000..d9e37ac3 --- /dev/null +++ b/docker-debugger/load-tests/create_test.py @@ -0,0 +1,74 @@ +#! /usr/bin/python3 +"""Create a payload and apache benchmark command to run tests. + +This assumes a user 'user1@example.com' with password 'password' +already exists. + +Prints the filename suffix. runtest{suffix}.sh is the generated script. +The output is stored in perc{suffix}.txt and out{suffix}.txt +""" +import uuid + +import argparse +import oisp + +DASHBOARD_URL = "http://dashboard:4001" +USERNAME = "user1@example.com" +PASSWORD = "password" + +DEFAULT_TIMEOUT = 120 +DEFAULT_CONCURRENCY = 20 +# Just set this to a very large number to actually run into timeout +DEFAULT_NR_REQUESTS = 640000 + +def create_test(concurrency=DEFAULT_CONCURRENCY, + timeout = DEFAULT_TIMEOUT, + filename_suffix=None): + if filename_suffix is None: + filename_suffix = "" + else: + filename_suffix = "_" + filename_suffix + filename_suffix += "_c{}_t{}".format(concurrency, timeout) + + client = oisp.Client(DASHBOARD_URL + "/v1/api") + client.auth(USERNAME, PASSWORD) + + account_name = str(uuid.uuid4()) + account = client.create_account(account_name) + + device_id = str(uuid.uuid4()) + gateway_id = device_id + device = account.create_device(gateway_id, device_id) + device.activate() + + component_name = str(uuid.uuid4()) + cid = device.add_component(component_name, "temperature.v1.0")["cid"] + + device.add_sample(cid, 10) + device.submit_data() + + request = client.response.request + + script_filename = "runtest{}.sh".format(filename_suffix) + payload_filename = "payload{}.json".format(filename_suffix) + perc_filename = "perc{}.txt".format(filename_suffix) + out_filename = "out{}.txt".format(filename_suffix) + with open(payload_filename, "w") as payload_file: + payload_file.write(request.body) + with open(script_filename, "w") as test_script_file: + test_script_file.write("""ab -l -p {} -T application/json -H "Authorization: {}" """ + """ -n {} -t {} -e {} -c {} {} > {}""".format(payload_filename, + request.headers["Authorization"], + DEFAULT_NR_REQUESTS, timeout, + perc_filename, concurrency, + request.url, out_filename)) + return script_filename + + +if __name__=="__main__": + parser = argparse.ArgumentParser(description='Create a simple load test script.') + parser.add_argument("--suffix", metavar="suffix", default=None, help="Filename suffix") + parser.add_argument("--concurrency", metavar="concurrency", type=int, + help="Passed to ab", default=DEFAULT_CONCURRENCY) + args = parser.parse_args() + print(create_test(filename_suffix=args.suffix, concurrency=args.concurrency)) From 16db8d6d19eba559c29dce3502fd33569780008c Mon Sep 17 00:00:00 2001 From: Jamal El Youssefi Date: 
Tue, 5 Mar 2019 22:46:39 +0100 Subject: [PATCH 59/68] Modified the oisp testing framework to support the binary components (images, videos, ...) - Created new classes for Data, Rules and Components - Removed the all hard coded test values - Added an image component to test the binary encoding feature - Updated oisp-sdk-js hash to make sure sdk supports cbor Signed-off-by: Marcel Wagner --- tests/lib/common.js | 188 ++++++ tests/lib/helpers/cmpcatalog.js | 15 +- tests/lib/helpers/devices.js | 5 +- tests/oisp-tests.js | 1042 +++++++++++++++++++------------ tests/package.json | 4 +- 5 files changed, 823 insertions(+), 431 deletions(-) create mode 100644 tests/lib/common.js diff --git a/tests/lib/common.js b/tests/lib/common.js new file mode 100644 index 00000000..aacc6156 --- /dev/null +++ b/tests/lib/common.js @@ -0,0 +1,188 @@ +class Data { + constructor(value, expectedActuation, expectedEmailReason) { + this.value = value; + this.expectedActuation = expectedActuation; + this.expectedEmailReason = expectedEmailReason; + this.ts = null; + } +} + +class Rule { + constructor(name, op, value, actions) { + this.name = name; + this.basicConditionOperator = op; + this.basicConditionValue = value.toString(); + this.actions = []; + if ( actions ) { + this.actions = actions; + } + this.id = null; + this.cid = null; + } + + addAction(type, target) { + for(var action of this.actions) { + if ( action.type == type ) { + action.target.push(target); + return; + } + } + + var action = new Object(); + action.type = type; + action.target = []; + action.target.push(target) + this.actions.push(action) + } +} + +class Component { + constructor(name, dataType, format, unit, display, min, max, rules, getDataFn, checkDataFn) { + this._name = name; + this.catalog = new Object(); + this.catalog.dimension = name; + this.catalog.version = "1.0"; + this.catalog.type = "sensor"; + this.catalog.dataType = dataType; + this.catalog.format = format; + if ( min != null ) { + this.catalog.min = min; + } + if ( max != null ) { + this.catalog.max = max; + } + this.catalog.measureunit = unit; + this.catalog.display = display; + + this.rules = [] + if ( rules ) { + this.rules = rules; + } + this.data = []; + if ( getDataFn ) { + var values = getDataFn(this.name, this, function(obj, value) { + obj.data.push(value) + }) + if ( values ) { + this.data = values; + } + } + this.checkDataFn = checkDataFn; + this.dataIndex = 0; + this.next = null; + this.prev = null; + this.alerts = []; + } + + get name() { + return this._name + "-" + this.catalog.type; + } + + get type() { + return this._name + ".v" + this.catalog.version; + } + + get cId() { + return this.catalog.dimension + ".v" + this.catalog.version; + } + + reset() { + if ( this.data ) { + for (var i = 0; i < this.data.length; i++) { + this.data[i].ts = null; + } + } + this.dataIndex = 0; + } + + upgradeVersion() { + var version = this.catalog.version.split("."); + if ( version.length == 2 ) { + this.catalog.version = version[0] + "." 
+ (parseInt(version[1]) + 1); + return true; + } + else { + return false; + } + } + + checkData(data) { + if ( data && this.checkDataFn ) { + return this.checkDataFn(this.data, data) + } + + return "Cannot check data"; + } + + alertsNumber() { + var nb = 0; + this.data.forEach(function(data) { + if ( data.expectedActuation != null ) { + nb++; + } + }) + return nb; + } + + checkAlert(value, condition ) { + for (var i=0; i 0 ) { + this.list[this.list.length-1].next = component; + component.prev = this.list[this.list.length-1]; + } + + this.list.push(component); + } + + get size() { + return this.list.length; + } + + reset() { + for (var i=0; i 0 ) { + return this.list[0]; + } + else { + return null; + } + } + get last() { + if ( this.list.length > 0 ) { + return this.list[this.list.length-1]; + } + else { + return null; + } + } +} + +module.exports = { + Data, + Rule, + Component, + Components +} + diff --git a/tests/lib/helpers/cmpcatalog.js b/tests/lib/helpers/cmpcatalog.js index 080b9656..196eadb0 100644 --- a/tests/lib/helpers/cmpcatalog.js +++ b/tests/lib/helpers/cmpcatalog.js @@ -47,7 +47,7 @@ function getCatalog(userToken, accountId, cb){ } -function createCatalog(userToken, accountId, cb){ +function createCatalog(userToken, accountId, options, cb){ if (!cb) { throw "Callback required"; } @@ -55,18 +55,7 @@ function createCatalog(userToken, accountId, cb){ var data = { userToken: userToken, accountId: accountId, - body: - { - "dimension": "speed", - "version": "1.0", - "type": "sensor", - "dataType": "Number", - "format": "float", - "min": 0, - "max": 500, - "measureunit": "Degress Celsius", - "display": "timeSeries" - } + body: options }; api.cmpcatalog.createCatalog(data, function(err, response) { diff --git a/tests/lib/helpers/devices.js b/tests/lib/helpers/devices.js index 9bb0e581..c41c2841 100644 --- a/tests/lib/helpers/devices.js +++ b/tests/lib/helpers/devices.js @@ -194,7 +194,7 @@ function addDeviceComponent(name, type, userToken, accountId, deviceId, cb) { api.devices.addDeviceComponent(data, function(err, response) { if (err) { - cb(null); + cb(err); } else { assert.notEqual(response, null, 'response is null') cb(null, response.cid); @@ -217,12 +217,11 @@ function submitData(value, deviceToken, accountId, deviceId, cid, cb) { on: ts, data: [{ componentId: cid, - value: value.toString(), + value: Buffer.isBuffer(value) ? 
value : value.toString(), on: ts }] } } - api.devices.submitData(data, function(err, response) { if (err) { cb(err) diff --git a/tests/oisp-tests.js b/tests/oisp-tests.js index 75ead636..a8275d7c 100644 --- a/tests/oisp-tests.js +++ b/tests/oisp-tests.js @@ -26,15 +26,13 @@ var kafka = require('kafka-node'); var cfenvReader = require('./lib/cfenv/reader'); var helpers = require("./lib/helpers"); var colors = require('colors'); - var exec = require('child_process').exec; +var gm = require('gm').subClass({imageMagick: true}); +var { Data, Rule, Component, Components } = require('./lib/common') var accountName = "oisp-tests"; var deviceName = "oisp-tests-device"; -var componentName = "temperature-sensor"; -var componentType = "temperature.v1.0"; - var actuatorName = "powerswitch-actuator"; var actuatorType = "powerswitch.v1.0"; @@ -50,42 +48,123 @@ var imap_host = process.env.IMAP_HOST; var imap_port = process.env.IMAP_PORT; var recipientEmail = imap_username; - var rules = []; -rules[switchOnCmdName] = { - name: "oisp-tests-rule-low-temp", - conditionComponent: componentName, - basicConditionOperator: "<=", - basicConditionValue: "15", - actions: [ - { - type: "actuation", - target: [ switchOnCmdName ] - }, - { - type: "mail", - target: [ emailRecipient ] - } - ], -}; - -rules[switchOffCmdName] = { - name: "oisp-tests-rule-high-temp", - conditionComponent: componentName, - basicConditionOperator: ">", - basicConditionValue: "25", - actions: [ - { - type: "actuation", - target: [ switchOffCmdName ] - }, - { - type: "mail", - target: [ emailRecipient ] - } - ], -}; + + +//------------------------------------------------------------------------------------------------------- +// Rules +//------------------------------------------------------------------------------------------------------- +var lowTemperatureRule = new Rule("oisp-tests-rule-low-temp","<=", 15); + lowTemperatureRule.addAction("actuation", switchOnCmdName); + lowTemperatureRule.addAction("mail", emailRecipient); + +var highTemperatureRule = new Rule("oisp-tests-rule-high-temp",">", 25); + highTemperatureRule.addAction("actuation", switchOffCmdName); + highTemperatureRule.addAction("mail", emailRecipient); + +//------------------------------------------------------------------------------------------------------- +// Components +//------------------------------------------------------------------------------------------------------- +var components = new Components() + +components.add( new Component("temperatures", "Number", "float", "Degress Celsius", "timeSeries", -150, 150, + [lowTemperatureRule, highTemperatureRule], + temperatureData, temperatureCheckData) + ); + +components.add( new Component("images", "ByteArray", "image/jpeg", "pixel", "binaryDataRenderer", null, null, + [], + imageData, imageCheckData) + ); + +function temperatureData(componentName) { + + var data = [ + new Data(-15, 1, componentName + " <= 15"), + new Data( -5, 1, componentName + " <= 15"), + new Data( 5, 1, componentName + " <= 15"), + new Data( 15, 1, componentName + " <= 15"), + new Data( 25, null, null), + new Data( 30, 0, componentName + " > 25"), + new Data( 20, null, null), + new Data( 14, 1, componentName + " <= 15"), + new Data( 20, null, null), + new Data( 28, 0, componentName + " > 25") + ]; + + return data; +} + +function temperatureCheckData(sentData, receivedData) { + if ( sentData.length == receivedData.length) { + for (var i = 0; i < sentData.length; i++) { + if (sentData[i].ts == receivedData[i].ts && sentData[i].value == 
receivedData[i].value) { + sentData[i].ts = null; + } + } + } + + var err = null; + for (var i = 0; i < sentData.length; i++) { + if (sentData[i].ts != null) { + err += "[" + i + "]=" + sentData[i].value + " "; + } + } + if (err) { + err = "Got wrong data for " + err; + } + + return err; +} + + +function imageData(componentName, opaque, cb) { + if (!cb) { + throw "Callback required"; + } + + var images = [ + new gm(100, 200, "red"), + new gm(400, 600, "white"), + new gm(1280, 720, "blue") + ]; + + images.forEach(function(image) { + image.toBuffer("JPEG", function(err, buffer) { + if (!err) { + cb(opaque, new Data(buffer, null, null)) + } + }) + }) + + return null; +} + + +function imageCheckData(sentData, receivedData) { + if ( sentData.length == receivedData.length) { + for (var i = 0; i < sentData.length; i++) { + if (sentData[i].ts == receivedData[i].ts && + sentData[i].value.equals(receivedData[i].value)) { + sentData[i].ts = null; + } + } + } + + var err = null; + for (var i = 0; i < sentData.length; i++) { + if (sentData[i].ts != null) { + err += i + " " ; + } + } + if (err) { + err = "Got wrong data for items" + err; + } + + return err; + return null; +} //------------------------------------------------------------------------------------------------------- // Tests @@ -98,64 +177,11 @@ var userId; var accountId; var deviceId; var deviceToken; -var componentId; var actuatorId; var rulelist; -var alertlist; var componentParamName; var firstObservationTime; -var temperatureValues = [{ - value: -15, - expectedActuation: 1, // swich on - expectedEmailReason: "temperature-sensor <= 15" - }, - { - value: -5, - expectedActuation: 1, // swich on - expectedEmailReason: "temperature-sensor <= 15" - }, - { - value: 5, - expectedActuation: 1, // swich on - expectedEmailReason: "temperature-sensor <= 15" - }, - { - value: 15, - expectedActuation: 1, // swich on - expectedEmailReason: "temperature-sensor <= 15" - }, - { - value: 25, - expectedActuation: null, // do nothing (no actuation) - expectedEmailReason: null, - }, - { - value: 30, - expectedActuation: 0, // swich off - expectedEmailReason: "temperature-sensor > 25" - }, - { - value: 20, - expectedActuation: null, // do nothing (no actuation) - expectedEmailReason: null - }, - { - value: 14, - expectedActuation: 1, // swich on - expectedEmailReason: "temperature-sensor <= 15" - }, - { - value: 20, - expectedActuation: null, // do nothing (no actuation) - expectedEmailReason: null - }, - { - value: 28, - expectedActuation: 0, // swich off - expectedEmailReason: "temperature-sensor > 25" - } -]; process.stdout.write("_____________________________________________________________________\n".bold); process.stdout.write(" \n"); @@ -181,10 +207,8 @@ describe("Waiting for OISP services to be ready ...\n".bold, function() { accountId = null; deviceId = "00-11-22-33-44-55"; deviceToken = null; - componentId = null; actuatorId = null; rulelist = null; - alertlist = null; componentParamName = "LED"; firstObservationTime = null; @@ -530,32 +554,136 @@ describe("Creating account and device ...\n".bold, function() { }) }) -describe("Creating and getting components ... \n".bold, function() { +describe("Managing components catalog ... 
\n".bold, function() { - it('Shall add device a component', function(done) { + it('Shall create new custom Components Types', function(done) { + var createCatalog = function(component) { + if ( component ) { + helpers.cmpcatalog.createCatalog(userToken, accountId, component.catalog, function(err, response) { + if (err) { + done(new Error("Cannot create component: " + err)); + } else { + //component.cId = response.id; + // assert.equal(response.id, component.cId, 'cannot create new component type') + createCatalog(component.next); + } + }) + } else { + done() + } + } + createCatalog(components.first); + }).timeout(10000); - helpers.devices.addDeviceComponent(componentName, componentType, deviceToken, accountId, deviceId, function(err, id) { + it('Shall list all component types for account', function(done) { + helpers.cmpcatalog.getCatalog(userToken, accountId, function(err, response) { if (err) { done(new Error("Cannot create component: " + err)); } else { - componentId = id; + assert.equal(response.length, components.size + 3, 'get wrong number of components ') done(); } }) }).timeout(10000); + + it('Shall get component type details', function(done) { + var getCatalogDetail = function(component) { + if ( component ) { + if ( component.catalog.max ) { + helpers.cmpcatalog.getCatalogDetail(userToken, accountId, component.cId, function(err, response) { + if (err) { + done(new Error("Cannot get component type details " + err)); + } else { + assert.equal(response.id, component.cId, 'cannot get component '+component.name) + assert.equal(response.max, component.catalog.max) + getCatalogDetail(component.next) + } + }) + } else { + getCatalogDetail(component.next) + } + } else { + done() + } + } + getCatalogDetail(components.first) + }).timeout(10000); + + it('Shall update a component type', function(done) { + var updateCatalog = function(component) { + if ( component ) { + if ( component.catalog.min != null && + component.catalog.max != null ) { + var newmin = 10; + var newmax = 1000; + + helpers.cmpcatalog.updateCatalog(userToken, accountId, component.cId, newmin, newmax, function(err, response) { + if (err) { + done(new Error("Cannot get component type details " + err)); + } else { + if ( component.upgradeVersion() == false ) { + done(new Error("Cannot upgrade version for component " + component.name)) + } + assert.equal(response.id, component.cId, 'cannot update '+component.name+' component to '+component.catalog.version) + assert.equal(response.max, newmax) + updateCatalog(component.next) + } + }) + } else { + updateCatalog(component.next) + } + } else { + done() + } + } + updateCatalog(components.first) + }).timeout(10000); +}) + +describe("Creating and getting components ... 
\n".bold, function() { + + it('Shall add device a component', function(done) { + var addDeviceComponent = function(component) { + if ( component ) { + helpers.devices.addDeviceComponent(component.name, component.type, deviceToken, accountId, deviceId, function(err, id) { + if (err) { + done(new Error("Cannot create component " + component + " : " +err)); + } else { + if ( id ) { + component.id = id; + addDeviceComponent(component.next); + } + else { + done(new Error("Wrong id for component " + component )); + } + } + }) + } + else { + done(); + } + } + addDeviceComponent(components.first); + + }).timeout(10000); it('Shall not add device a component with the name that a component of the device already has, or crash', function(done) { - - helpers.devices.addDeviceComponent(componentName, componentType, deviceToken, accountId, deviceId, function(err, id) { - if (err) { - done(new Error("Cannot create or try to create component: " + err)); - } else if (id === undefined) { - // Response with code 409, id should be undefined - done(); - } else { - done(new Error("No error is thrown and the component is added successfully: " + id)); + var addDeviceComponent = function(component) { + if ( component ) { + helpers.devices.addDeviceComponent(component.name, component.type, deviceToken, accountId, deviceId, function(err, id) { + if (err) { + addDeviceComponent(component.next); + } else { + done(new Error("No error is thrown and the component is added successfully: " + id)); + } + }) } - }) + else { + done(); + } + } + addDeviceComponent(components.first); + }).timeout(10000); it('Shall add device an actuator', function(done) { @@ -631,248 +759,220 @@ describe("Creating and getting components ... \n".bold, function() { }); -describe("Getting components catalog ... \n".bold, function() { - it('Shall create a new custom Component Type', function(done) { - helpers.cmpcatalog.createCatalog(userToken, accountId, function(err, response) { - if (err) { - done(new Error("Cannot create component: " + err)); - } else { - assert.equal(response.id, 'speed.v1.0', 'cannot create new component type') - done(); - } +describe("Creating rules ... 
\n".bold, function() { + it('Shall create rules', function(done) { + var nbRules = 0; + components.list.forEach(function(component) { + nbRules += component.rules.length; }) - }).timeout(10000); - it('Shall list all component types for account', function(done) { - helpers.cmpcatalog.getCatalog(userToken, accountId, function(err, response) { - if (err) { - done(new Error("Cannot list component: " + err)); - } else { - assert.equal(response.length, 4, 'get wrong number of component types') - done(); - } - }) - }).timeout(10000); + if ( nbRules > 0 ) { + components.list.forEach(function(component) { + component.rules.forEach(function(rule) { + rule.cid = component.id; + rule.conditionComponent = component.name; + helpers.rules.createRule(rule, userToken, accountId, deviceId, function(err, id) { + if (err) { + done(new Error("Cannot create rule " + rule.name +": " + err)); + } else { + rule.id = id; + if ( --nbRules == 0 ) { + done(); + } + } + }) + }) + }) + } + else { + done() + } - it('Shall get component type details', function(done) { - helpers.cmpcatalog.getCatalogDetail(userToken, accountId, 'speed.v1.0', function(err, response) { - if (err) { - done(new Error("Cannot get component type details " + err)); - } else { - assert.equal(response.id, 'speed.v1.0', 'cannot get speed component') - assert.equal(response.max, 500) - done(); - } - }) - }).timeout(10000); + }).timeout(20000); - it('Shall update a component type', function(done) { - var newmin = 10; - var newmax = 1000; - helpers.cmpcatalog.updateCatalog(userToken, accountId, 'speed.v1.0', newmin, newmax, function(err, response) { + + it('Shall get all rules', function(done) { + helpers.rules.getRules(userToken, accountId, function(err, response) { if (err) { - done(new Error("Cannot get component type details " + err)); + done(new Error("Cannot get rules " + err)); } else { - assert.equal(response.id, 'speed.v1.1', 'cannot update speed component to v1.1') - assert.equal(response.max, 1000) + components.list.forEach(function(component) { + if ( component.hasOwnProperty('rules') ) { + component.rules.forEach(function(rule) { + var found = false; + for(var i=0; i helpers.mail.getAllEmailMessages(imap_username, imap_password, imap_host, imap_port)) + .then( (messages) => { + messages.forEach( (message) => { + var lines = message.toString().split("\n"); + var i; + lines.forEach((line) => { + var reExecReason = /^- Reason:.*/; + if ( reExecReason.test(line) ) { + var reason = line.split(":")[1].trim(); + var index = expectedEmailReasons.findIndex( (element) => { + return (reason == element); + }) + if ( index >= 0 ) { + expectedEmailReasons.splice(index, 1); + } + } + }) + }) + assert.equal(expectedEmailReasons.length, 0, "Received emails do not match expected emails sent from rule-engine"); + done(); + }).catch( (err) => {done(err)}); + }).timeout(30 * 1000); + + it('Shall check observations', function(done) { + var checkObservations = function(component) { + if ( component ) { + if ( component.data.length > 0 ) { + + helpers.data.searchData(component.data[0].ts, component.data[component.data.length-1].ts, + userToken, accountId, deviceId, component.id, false, {}, function(err, result) { + if (err) { + done(new Error("Cannot get data: " + err)) + } + else if (result && result.series && result.series.length == 1) { + err = component.checkData(result.series[0].points); + } + else { + done(new Error("Cannot get data.")); + } - }).timeout(5*60*1000) - - //--------------------------------------------------------------- - - it('Shall check 
received emails', function(done){ - helpers.mail.waitForNewEmail(imap_username, imap_password, imap_host, imap_port, 7) - .then(() => helpers.mail.getAllEmailMessages(imap_username, imap_password, imap_host, imap_port)) - .then( (messages) => { - var temperatureValuesCopy = temperatureValues.map( (elem) => elem); - messages.forEach( (message) => { - var lines = message.toString().split("\n"); - var i; - lines.forEach((line) => { - var reExecReason = /^- Reason:.*/; - if ( reExecReason.test(line) ) { - var reason = line.split(":")[1].trim(); - var index = temperatureValuesCopy.findIndex( (element) => { - return (reason == element.expectedEmailReason); - }) - temperatureValuesCopy.splice(index, 1); - } - }) - }) - assert.equal(temperatureValuesCopy.length, 3, "Received emails do not match expected emails sent from rule-engine"); - done(); - }).catch( (err) => {done(err)}); - }).timeout(30 * 1000); - - it('Shall check observation', function(done) { - helpers.data.searchData(firstObservationTime, -1, userToken, accountId, deviceId, componentId, false, {}, function(err, result) { - if (err) { - done(new Error("Cannot get data: " + err)) - } - - var data = {} - if (result && result.series && result.series.length == 1){ - data = result.series[0].points; - } - else { - done(new Error("Cannot get data.")); - } - if (data && data.length == temperatureValues.length) { - for (var j = 0; j < temperatureValues.length; j++) { - if (temperatureValues[j].ts == data[j].ts && temperatureValues[j].value == data[j].value) { - temperatureValues[j].ts = null; - } - } - } - - var err = ""; - for (var i = 0; i < temperatureValues.length; i++) { - if (temperatureValues[i].ts != null) { - err += "[" + i + "]=" + temperatureValues[i].value + " "; - } - } - if (err.length == 0) { - done(); - } else { - done(new Error("Got wrong data for " + err)) - } - - }) - }).timeout(10000); + if (err) { + done(new Error(err)) + } + else { + checkObservations(component.next) + } + }) + } + else { + checkObservations(component.next) + } + } + else { + done() + } + } + checkObservations(components.first) + }).timeout(10000); }); @@ -1000,69 +1100,141 @@ describe("Do data sending subtests ...".bold, describe("Geting and manage alerts ... 
\n".bold, function(){ - it('Shall get list of alerts', function(done){ - helpers.alerts.getListOfAlerts(userToken, accountId, function(err, response) { - if (err) { - done(new Error("Cannot get list of alerts: " + err)); - } else { - assert.notEqual(response, null ,'response is null') - assert.equal(response.length, 7, 'get wrong number of alerts') - alertlist = response - done(); + it('Shall get list of alerts', function(done) { + var getListOfAlerts = function(component) { + if ( component ) { + var alertsNumer = component.alertsNumber(); + if ( alertsNumer > 0 ) { + helpers.alerts.getListOfAlerts(userToken, accountId, function(err, response) { + if (err) { + done(new Error("Cannot get list of alerts: " + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.equal(response.length, alertsNumer, 'get wrong number of alerts') + component.alerts = response; + getListOfAlerts(component.next) + } + }) + } + else { + getListOfAlerts(component.next) + } } - }) - }) + else { + done() + } + } + getListOfAlerts(components.first) + }).timeout(10000); it('Shall add comments to the Alert', function(done){ - var comments = { - "user": "alertcomment@intel.com", - "timestamp": 123233231221, - "text": "comment" - } + var addCommentsToAlert = function(component) { + if ( component ) { + if ( component.alerts.length > 0 ) { + var comments = { + "user": "alertcomment@intel.com", + "timestamp": 123233231221, + "text": "comment" + } - helpers.alerts.addCommentsToAlert(userToken, accountId, alertlist[0].alertId, comments, function(err, response) { - if (err) { - done(new Error("Cannot add comments to the Alert" + err)); - } else { - assert.notEqual(response, null ,'response is null') - assert.equal(response.status, 'OK') + helpers.alerts.addCommentsToAlert(userToken, accountId, component.alerts[0].alertId, comments, function(err, response) { + if (err) { + done(new Error("Cannot add comments to the Alert" + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.equal(response.status, 'OK') + done() + } + }) + } + else { + addCommentsToAlert(component.next) + } + } + else { done(); } - }) + } + addCommentsToAlert(components.first) + }) - it('Shall get alert infomation', function(done){ - helpers.alerts.getAlertDetails(userToken, accountId, alertlist[0].alertId, function(err, response) { - if (err) { - done(new Error("Cannot get list of alerts: " + err)); - } else { - assert.notEqual(response, null ,'response is null') - assert.equal(response.conditions[0].condition, 'temperature-sensor > 25', 'get error alert') - done(); + it('Shall get alert infomation', function(done) { + var getAlertDetails = function(component) { + if ( component ) { + if ( component.alerts.length > 0 ) { + helpers.alerts.getAlertDetails(userToken, accountId, component.alerts[0].alertId, function(err, response) { + if (err) { + done(new Error("Cannot get list of alerts: " + err)); + } + else { + assert.notEqual(response, null ,'response is null') + if (!component.checkAlert(parseInt(response.conditions[0].components[0].valuePoints[0].value), + response.conditions[0].condition) ) { + done(new Error("get error alert for: " + response.conditions[0].condition)); + } + done(); + } + }) + } + else { + getAlertDetails(component.next); + } } - }) + else { + done() + } + } + getAlertDetails(components.first); }) - it('Shall update alert status', function(done){ - helpers.alerts.updateAlertStatus(userToken, accountId, alertlist[1].alertId, 'Open', function(err, response) { - if (err) { - done(new 
Error("Cannot update alert status " + err)); - } else { - assert.notEqual(response, null ,'response is null') - assert.equal(response.status, 'Open', 'wrong alert status') - done(); + it('Shall update alert status', function(done) { + var updateAlertStatus = function(component) { + if ( component ) { + if ( component.alerts.length > 1 ) { + helpers.alerts.updateAlertStatus(userToken, accountId, component.alerts[1].alertId, 'Open', function(err, response) { + if (err) { + done(new Error("Cannot update alert status " + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.equal(response.status, 'Open', 'wrong alert status') + done(); + } + }) + } + else { + updateAlertStatus(component.next) + } } - }) + else { + updateAlertStatus(component.next) + } + } + updateAlertStatus(components.first) }) - it('Shall clear alert infomation', function(done){ - helpers.alerts.closeAlert(userToken, accountId, alertlist[2].alertId, function(err, response) { - if (err) { - done(new Error("Cannot clear alert infomation " + err)); - } else { - done(); + it('Shall clear alert infomation', function(done) { + var closeAlert = function(component) { + if ( component ) { + if ( component.alerts.length > 2 ) { + helpers.alerts.closeAlert(userToken, accountId, component.alerts[2].alertId, function(err, response) { + if (err) { + done(new Error("Cannot clear alert infomation " + err)); + } else { + done(); + } + }) + } + else { + closeAlert(component.next) + } } - }) + else + { + closeAlert(component.next) + } + } + closeAlert(components.first) }) }); @@ -1071,54 +1243,88 @@ describe("update rules and create draft rules ... \n".bold, function(){ var cloneruleId; - it('Shall clone a rule', function(done){ - var rulename_clone = rulelist[1].name + ' - cloned'; - - helpers.rules.cloneRule (userToken, accountId, rulelist[1].id, function(err, response) { - if (err) { - done(new Error("cannot clone a rule " + err)); - } else { - assert.notEqual(response, null ,'response is null') - assert.equal(response.name, rulename_clone, 'clone error rule') - cloneruleId = response.id - done(); + it('Shall clone a rule', function(done) { + var cloneRule = function(component) { + if ( component ) { + if ( component.rules.length > 0 ) { + var rulename_clone = component.rules[0].name + ' - cloned'; + + helpers.rules.cloneRule (userToken, accountId, component.rules[0].id, function(err, response) { + if (err) { + done(new Error("cannot clone a rule " + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.equal(response.name, rulename_clone, 'clone error rule') + cloneruleId = response.id + done(); + } + }) + } + else { + cloneRule(component.next) + } } - }) + else { + done() + } + } + cloneRule(components.first) }) it('Shall update a rule', function(done) { - - var newrule = { - name: "oisp-tests-rule-high-temp-new", - synchronizationStatus: "NotSync", - conditionComponent: componentName, - basicConditionOperator: ">", - basicConditionValue: "28", - actuationCmd: switchOffCmdName, - cid: componentId - } - - helpers.rules.updateRule(newrule, userToken, accountId, cloneruleId, function(err, response) { - if (err) { - done(new Error("Cannot update rules " + err)); - } else { - assert.equal(response.name, newrule.name, 'update rule wrong') - done(); + var updateRule = function(component) { + if ( component ) { + if ( component.rules.length > 0 ) { + var newrule = new Rule("oisp-tests-rule-high-temp-new",">", 28); + newrule.synchronizationStatus = "NotSync"; + newrule.actuationCmd = 
switchOffCmdName, + newrule.cid = component.id; + newrule.conditionComponent = component.name; + + helpers.rules.updateRule(newrule, userToken, accountId, cloneruleId, function(err, response) { + if (err) { + done(new Error("Cannot update rules " + err)); + } else { + assert.equal(response.name, newrule.name, 'update rule wrong') + done(); + } + }) + } + else { + updateRule(component.next); + } } - }) + else { + done() + } + } + updateRule(components.first); }).timeout(20000); - it('Shall update rule status', function(done){ - helpers.rules.updateRuleStatus (userToken, accountId, rulelist[1].id, 'Archived', function(err, response) { - if (err) { - done(new Error("Cannot update rule status " + err)); - } else { - assert.notEqual(response, null ,'response is null') - assert.equal(response.status, 'Archived', 'wrong rule status') - done(); + it('Shall update rule status', function(done){ + var updateRuleStatus = function(component) { + if ( component ) { + if ( component.rules.length > 1 ) { + helpers.rules.updateRuleStatus (userToken, accountId, component.rules[1].id, 'Archived', function(err, response) { + if (err) { + done(new Error("Cannot update rule status " + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.equal(response.status, 'Archived', 'wrong rule status') + done(); + } + }) + } + else { + updateRuleStatus(component.next) + } } - }) + else { + done() + } + } + updateRuleStatus(components.first) }) it('Shall create a draft rule', function(done){ @@ -1433,15 +1639,23 @@ describe("change password and delete receiver ... \n".bold, function(){ }) it('Shall delete a component', function(done){ - helpers.devices.deleteDeviceComponent (userToken, accountId, deviceId, componentId, function(err, response){ - if (err) { - done(new Error("cannot delete a component " + err)); - } else { - assert.notEqual(response, null ,'response is null') - assert.equal(response.status, 'Done') - done(); - } - }) + var deleteComponent = function(component) { + if ( component ) { + helpers.devices.deleteDeviceComponent (userToken, accountId, deviceId, component.id, function(err, response){ + if (err) { + done(new Error("cannot delete a component " + err)); + } else { + assert.notEqual(response, null ,'response is null') + assert.equal(response.status, 'Done') + deleteComponent(component.next) + } + }) + } + else { + done() + } + } + deleteComponent(components.first) }) it('Shall delete a device', function(done){ diff --git a/tests/package.json b/tests/package.json index d2784fa8..e71f7b56 100644 --- a/tests/package.json +++ b/tests/package.json @@ -19,6 +19,8 @@ "nodemailer": "^4.6.6", "imap": "^0.8.19", "mailparser": "^2.2.0", - "@open-iot-service-platform/oisp-sdk-js":"https://github.com/Open-IoT-Service-Platform/oisp-sdk-js.git#fdd16923d2d244027638f0b42909d8c29c5444f6" + "cbor": "^4.1.5", + "gm": "^1.23.1", + "@open-iot-service-platform/oisp-sdk-js": "https://github.com/Open-IoT-Service-Platform/oisp-sdk-js.git#15e53c93f3a6589682a84c22f1ed4da552c9a084" } } From 0f5417bfc941afec595a3b84d469329af46a8dab Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Sat, 16 Mar 2019 20:29:03 +0100 Subject: [PATCH 60/68] Added imagemagick install to platform-setup Signed-off-by: Marcel Wagner --- platform-setup/setup-ubuntu16.04.sh | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/platform-setup/setup-ubuntu16.04.sh b/platform-setup/setup-ubuntu16.04.sh index cd2305e4..cecbbde8 100755 --- a/platform-setup/setup-ubuntu16.04.sh +++ 
b/platform-setup/setup-ubuntu16.04.sh @@ -23,9 +23,7 @@ sudo curl -L https://github.com/docker/compose/releases/download/1.22.0/docker-c sudo chmod +x /usr/local/bin/docker-compose #other packages needed -sudo apt-get install make git -sudo apt install kafkacat - +sudo apt-get install -y make git kafkacat imagemagick sudo usermod -aG docker ${USER} From 377fe61ce30c528bf592be00804ee314c5da2b9a Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Sat, 16 Mar 2019 20:35:25 +0100 Subject: [PATCH 61/68] Removed tests/test-config.json Signed-off-by: Marcel Wagner --- tests/test-config.json | 35 ----------------------------------- 1 file changed, 35 deletions(-) delete mode 100644 tests/test-config.json diff --git a/tests/test-config.json b/tests/test-config.json deleted file mode 100644 index 3c7868b5..00000000 --- a/tests/test-config.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "logger": { - "LEVEL": "info", - "PATH": "/tmp/", - "MAX_SIZE": 134217728 - }, - "default_connector": "rest+ws", - "connector": { - "rest": { - "host": "localhost", - "port": 80, - "protocol": "http", - "strictSSL": false, - "timeout": 30000, - "proxy": { - "host": false, - "port": false - } - }, - "ws": { - "host": "localhost", - "port": 5000, - "minRetryTime": 2500, - "maxRetryTime": 600000, - "testTimeout": 40000, - "pingPongIntervalMs": 30000, - "enablePingPong": true, - "secure": false, - "proxy": { - "host": false, - "port": false - } - } - } -} From d70d37393eff4b3753458ac4db288a292b92bb42 Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Mon, 1 Apr 2019 08:45:29 +0000 Subject: [PATCH 62/68] CircleCI integration: * Added config file with sign-off checking * First test for ssh triggering with circleci * Updated ci script to avoid interactive host checking Signed-off-by: Marcel Wagner --- .circleci/config.yml | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 .circleci/config.yml diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 00000000..f6a399b5 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,43 @@ +# Javascript Node CircleCI 2.0 configuration file +# +# Check https://circleci.com/docs/2.0/language-javascript/ for more details +# +version: 2 +jobs: + build: + docker: + # specify the version you desire here + - image: circleci/node:8.0 + + # Specify service dependencies here if necessary + # CircleCI maintains a library of pre-built images + # documented at https://circleci.com/docs/2.0/circleci-images/ + # - image: circleci/mongo:3.4.4 + + working_directory: ~/repo + environment: + shell: /bin/bash + TERM: xterm + + steps: + - checkout + + - run: + name: Check whether most recent commit is signedoff + shell: /bin/bash + command: | + MESSAGE=`git log -1 --pretty=%B` + echo "Checking whether signed" + if [[ "${MESSAGE}" == *Signed-off-by:*@* ]]; then + echo "Commit is signedoff" + else + echo "Commit is not signedoff" + exit 1 + fi + + # run tests! 
+ - run: + shell: /bin/bash + name: Run remote build & e2e tests + command: | + ssh -o "StrictHostKeyChecking no" root@ci.streammyiot.com /opt/test/testTest.sh test From f1aad934af47004453a7fc183c752c14bbfe21fd Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Mon, 1 Apr 2019 12:21:35 +0000 Subject: [PATCH 63/68] docker.sh: Removed redsocks and added explicit exit 1 to make sure makefile realizes that compose failed Signed-off-by: Marcel Wagner --- docker.sh | 21 +-------------------- 1 file changed, 1 insertion(+), 20 deletions(-) diff --git a/docker.sh b/docker.sh index 3180446f..de07cb2b 100755 --- a/docker.sh +++ b/docker.sh @@ -23,24 +23,5 @@ export GIT_COMMIT_GEARPUMP=$(git -C oisp-gearpump-rule-engine rev-parse HEAD) export GIT_COMMIT_WEBSOCKET_SERVER=$(git -C oisp-websocket-server rev-parse HEAD) export GIT_COMMIT_BACKEND=$(git -C oisp-backend rev-parse HEAD) -redsocks_container_name='redsocks' -if [ -n "$http_proxy" ] && [ -n "$https_proxy" ] && [ "$1" == "up" ]; then - # Run redsocks to allow containers to use proxy - - - if ! [[ $(docker ps -f "name=$redsocks_container_name" --format '{{.Names}}') == $redsocks_container_name ]]; then - echo "Starting redsocks..." - - docker run -d --net=host --privileged --name $redsocks_container_name --rm -e http_proxy=$http_proxy -e https_proxy=$https_proxy klabs/forgetproxy - echo - fi -fi - -docker-compose $* - -if [ "$1" == "stop" ]; then - if [[ $(docker ps -f "name=$redsocks_container_name" --format '{{.Names}}') == $redsocks_container_name ]]; then - docker stop $redsocks_container_name - fi -fi +docker-compose $* || exit 1 From a1f1099234677ce658f327df5f8ac64b2fe932f6 Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Sat, 30 Mar 2019 19:29:49 +0100 Subject: [PATCH 64/68] Updated Kafka to default bitnami container Added scnd kafka listener for localhost Signed-off-by: Marcel Wagner --- Makefile | 6 +- docker-compose.yml | 22 +++--- docker-kafka/Dockerfile | 36 ---------- docker-kafka/scripts/start-kafka.sh | 90 ------------------------ docker-kafka/supervisor/kafka.conf | 5 -- docker-kafka/supervisor/supervisord.conf | 29 -------- docker-kafka/supervisor/zookeeper.conf | 4 -- 7 files changed, 16 insertions(+), 176 deletions(-) delete mode 100644 docker-kafka/Dockerfile delete mode 100755 docker-kafka/scripts/start-kafka.sh delete mode 100644 docker-kafka/supervisor/kafka.conf delete mode 100644 docker-kafka/supervisor/supervisord.conf delete mode 100644 docker-kafka/supervisor/zookeeper.conf diff --git a/Makefile b/Makefile index 5d3a4b8a..9fa21c1a 100644 --- a/Makefile +++ b/Makefile @@ -129,7 +129,11 @@ endif mkdir -p data/keys/mqtt; \ openssl rand -base64 16 > data/keys/mqtt/mqtt_gw_secret.key; \ fi; - + @if [ -f data/kafka ]; then echo "Kafka persitence dir existing already. Skipping creating new dir"; else \ + echo "Creating kafka folder"; \ + mkdir -p data/kafka; \ + chmod 777 data/kafka; \ + fi; @touch $@ ## build: Build OISP images locally. 
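The bitnami broker introduced by this patch is split into two listeners: LISTENER_BOB is advertised
as kafka:9092 for containers on the compose network, while LISTENER_FRED is advertised as
localhost:9093 and published to the host (see the docker-compose.yml hunk below). A rough smoke
test along the following lines can confirm that both paths answer; it assumes kafkacat is installed
on the host (as the platform-setup script installs it), and the topic name "listener-check" is only
a placeholder for illustration, not something the patch itself creates.

    # list broker metadata through the host-facing listener published on localhost:9093
    kafkacat -L -b localhost:9093

    # produce one message and read it back; -e exits at end of partition, -o -1 starts
    # at the last message (the same kafkacat options the tests/Makefile already uses)
    echo listener-check-ping | kafkacat -P -b localhost:9093 -t listener-check
    kafkacat -C -b localhost:9093 -t listener-check -e -o -1

    # containers on the compose network reach the broker via the advertised name
    # kafka:9092 instead, e.g. KAFKA_BROKER=kafka:9092 in the test targets
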
diff --git a/docker-compose.yml b/docker-compose.yml index 24886874..67b3c9c7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -77,21 +77,21 @@ services: - POSTGRES_USER=${POSTGRES_USERNAME} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} kafka: - image: oisp/kafka:${DOCKER_TAG} - build: - context: ./docker-kafka/ - labels: - - oisp=true - - oisp.git_commit=${GIT_COMMIT_PLATFORM_LAUNCHER} + image: bitnami/kafka:1.1.1 ports: - - 9092:9092 + - 9093:9093 + - 9092:9093 depends_on: - zookeeper environment: - - ADVERTISED_HOST=${HOST_IP_ADDRESS} - - ADVERTISED_PORT=${KAFKA_PORT} - - AUTO_CREATE_TOPICS=true - - ZK_CONNECT=${ZOOKEEPER_KAFKA}:${ZOOKEEPER_KAFKA_PORT} + - KAFKA_ZOOKEEPER_CONNECT=${ZOOKEEPER_KAFKA}:${ZOOKEEPER_KAFKA_PORT} + - ALLOW_PLAINTEXT_LISTENER=yes + - KAFKA_LISTENERS=LISTENER_BOB://0.0.0.0:9092,LISTENER_FRED://0.0.0.0:9093 + - KAFKA_ADVERTISED_LISTENERS=LISTENER_BOB://kafka:9092,LISTENER_FRED://localhost:9093 + - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP=LISTENER_BOB:PLAINTEXT,LISTENER_FRED:PLAINTEXT + - KAFKA_INTER_BROKER_LISTENER_NAME=LISTENER_BOB + volumes: + - ./data/kafka:/bitnami/kafka redis: image: redis:3.0 volumes: diff --git a/docker-kafka/Dockerfile b/docker-kafka/Dockerfile deleted file mode 100644 index a90c52dd..00000000 --- a/docker-kafka/Dockerfile +++ /dev/null @@ -1,36 +0,0 @@ -# Kafka and Zookeeper - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# File copied from: https://github.com/spotify/docker-kafka/kafka - -FROM java:openjdk-8-jre - -ENV DEBIAN_FRONTEND noninteractive -ENV SCALA_VERSION 2.11 -#ENV KAFKA_VERSION 0.10.2.0 -ENV KAFKA_VERSION 1.0.0 -ENV KAFKA_HOME /opt/kafka_"$SCALA_VERSION"-"$KAFKA_VERSION" - -# Install Kafka, Zookeeper and other needed things -RUN apt-get update && \ - apt-get install -y zookeeper wget supervisor dnsutils && \ - rm -rf /var/lib/apt/lists/* && \ - apt-get clean && \ - wget -q http://archive.apache.org/dist/kafka/"$KAFKA_VERSION"/kafka_"$SCALA_VERSION"-"$KAFKA_VERSION".tgz -O /tmp/kafka_"$SCALA_VERSION"-"$KAFKA_VERSION".tgz && \ - tar xfz /tmp/kafka_"$SCALA_VERSION"-"$KAFKA_VERSION".tgz -C /opt && \ - rm /tmp/kafka_"$SCALA_VERSION"-"$KAFKA_VERSION".tgz - -ADD scripts/start-kafka.sh /usr/bin/start-kafka.sh - -# Supervisor config -ADD supervisor/kafka.conf /etc/supervisor/conf.d/ - -# 2181 is zookeeper, 9092 is kafka -EXPOSE 9092 - -CMD ["supervisord", "-n"] diff --git a/docker-kafka/scripts/start-kafka.sh b/docker-kafka/scripts/start-kafka.sh deleted file mode 100755 index 023c087f..00000000 --- a/docker-kafka/scripts/start-kafka.sh +++ /dev/null @@ -1,90 +0,0 @@ -#!/usr/bin/env bash - -# Optional ENV variables: -# * ADVERTISED_HOST: the external ip for the container, e.g. `docker-machine ip \`docker-machine active\`` -# * ADVERTISED_PORT: the external port for Kafka, e.g. 9092 -# * ZK_CHROOT: the zookeeper chroot that's used by Kafka (without / prefix), e.g. "kafka" -# * LOG_RETENTION_HOURS: the minimum age of a log file in hours to be eligible for deletion (default is 168, for 1 week) -# * LOG_RETENTION_BYTES: configure the size at which segments are pruned from the log, (default is 1073741824, for 1GB) -# * NUM_PARTITIONS: configure the default number of log partitions per topic - -# Configure advertised host/port if we run in helios - - -if [ ! 
-z "$HELIOS_PORT_kafka" ]; then - ADVERTISED_HOST=`echo $HELIOS_PORT_kafka | cut -d':' -f 1 | xargs -n 1 dig +short | tail -n 1` - ADVERTISED_PORT=`echo $HELIOS_PORT_kafka | cut -d':' -f 2` -fi - - -# Set the external host and port -if [ ! -z "$ADVERTISED_HOST" ]; then - echo "advertised host: $ADVERTISED_HOST" - if grep -q "^advertised.host.name" $KAFKA_HOME/config/server.properties; then - sed -r -i "s/#(advertised.host.name)=(.*)/\1=$ADVERTISED_HOST/g" $KAFKA_HOME/config/server.properties - else - echo -e "\nadvertised.host.name=$ADVERTISED_HOST" >> $KAFKA_HOME/config/server.properties - fi -fi -if [ ! -z "$ADVERTISED_PORT" ]; then - echo "advertised port: $ADVERTISED_PORT" - if grep -q "^advertised.port" $KAFKA_HOME/config/server.properties; then - sed -r -i "s/#(advertised.port)=(.*)/\1=$ADVERTISED_PORT/g" $KAFKA_HOME/config/server.properties - else - echo "advertised.port=$ADVERTISED_PORT" >> $KAFKA_HOME/config/server.properties - fi -fi - -# Set the zookeeper chroot -if [ ! -z "$ZK_CHROOT" ]; then - # wait for zookeeper to start up - until /usr/share/zookeeper/bin/zkServer.sh status; do - sleep 0.1 - done - - # create the chroot node - echo "create /$ZK_CHROOT \"\"" | /usr/share/zookeeper/bin/zkCli.sh || { - echo "can't create chroot in zookeeper, exit" - exit 1 - } - - # configure kafka - sed -r -i "s/(zookeeper.connect)=(.*)/\1=locallhost:2181\/$ZK_CHROOT/g" $KAFKA_HOME/config/server.properties -fi - -# Configure external Zookeeper -if [ ! -z "$ZK_CONNECT" ]; then - echo "Setting external Zookeeper service" - sed -r -i "s/(zookeeper.connect)=(.*)/\1=$ZK_CONNECT/g" $KAFKA_HOME/config/server.properties -fi - -# Allow specification of log retention policies -if [ ! -z "$LOG_RETENTION_HOURS" ]; then - echo "log retention hours: $LOG_RETENTION_HOURS" - sed -r -i "s/(log.retention.hours)=(.*)/\1=$LOG_RETENTION_HOURS/g" $KAFKA_HOME/config/server.properties -fi -if [ ! -z "$LOG_RETENTION_BYTES" ]; then - echo "log retention bytes: $LOG_RETENTION_BYTES" - sed -r -i "s/#(log.retention.bytes)=(.*)/\1=$LOG_RETENTION_BYTES/g" $KAFKA_HOME/config/server.properties -fi - -# Configure the default number of log partitions per topic -if [ ! -z "$NUM_PARTITIONS" ]; then - echo "default number of partition: $NUM_PARTITIONS" - sed -r -i "s/(num.partitions)=(.*)/\1=$NUM_PARTITIONS/g" $KAFKA_HOME/config/server.properties -fi - -# Enable/disable auto creation of topics -if [ ! 
-z "$AUTO_CREATE_TOPICS" ]; then - echo "auto.create.topics.enable: $AUTO_CREATE_TOPICS" - if grep auto.create.topics.enable $KAFKA_HOME/config/server.properties; then - sed -r -i "s/(auto.create.topics.enable)=(.*)/\1=${AUTO_CREATE_TOPICS}/g" $KAFKA_HOME/config/server.properties - else - echo "auto.create.topics.enable=$AUTO_CREATE_TOPICS" >> $KAFKA_HOME/config/server.properties - fi -fi - -# Run kafka-server -$KAFKA_HOME/bin/kafka-server-start.sh $KAFKA_HOME/config/server.properties - - diff --git a/docker-kafka/supervisor/kafka.conf b/docker-kafka/supervisor/kafka.conf deleted file mode 100644 index 62e2896b..00000000 --- a/docker-kafka/supervisor/kafka.conf +++ /dev/null @@ -1,5 +0,0 @@ -[program:kafka] -command=bash /usr/bin/start-kafka.sh -autostart=true -autorestart=true -stopsignal=TERM diff --git a/docker-kafka/supervisor/supervisord.conf b/docker-kafka/supervisor/supervisord.conf deleted file mode 100644 index 78f44461..00000000 --- a/docker-kafka/supervisor/supervisord.conf +++ /dev/null @@ -1,29 +0,0 @@ -; supervisor config file - -[unix_http_server] -file=/var/run/supervisor.sock ; (the path to the socket file) -chmod=0700 ; sockef file mode (default 0700) - -[supervisord] -logfile=/var/log/supervisor/supervisord.log ; (main log file;default $CWD/supervisord.log) -pidfile=/var/run/supervisord.pid ; (supervisord pidfile;default supervisord.pid) -childlogdir=/var/log/supervisor ; ('AUTO' child log dir, default $TEMP) -nodaemon=true - -; the below section must remain in the config file for RPC -; (supervisorctl/web interface) to work, additional interfaces may be -; added by defining them in separate rpcinterface: sections -[rpcinterface:supervisor] -supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface - -[supervisorctl] -serverurl=unix:///var/run/supervisor.sock ; use a unix:// URL for a unix socket - -; The [include] section can just contain the "files" setting. This -; setting can list multiple files (separated by whitespace or -; newlines). It can also contain wildcards. The filenames are -; interpreted as relative to this file. Included files *cannot* -; include files themselves. - -[include] -files = /etc/supervisor/conf.d/*.conf diff --git a/docker-kafka/supervisor/zookeeper.conf b/docker-kafka/supervisor/zookeeper.conf deleted file mode 100644 index 5650d95b..00000000 --- a/docker-kafka/supervisor/zookeeper.conf +++ /dev/null @@ -1,4 +0,0 @@ -[program:zookeeper] -command=/usr/share/zookeeper/bin/zkServer.sh start-foreground -autostart=true -autorestart=true \ No newline at end of file From f3f144c844222b3fd28c4eae472164092eb7dc90 Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Sat, 6 Apr 2019 09:05:12 +0000 Subject: [PATCH 65/68] tests/Makefile: Removed -i switch from docker Signed-off-by: Marcel Wagner --- tests/Makefile | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/tests/Makefile b/tests/Makefile index 0669b0e2..dde10d65 100644 --- a/tests/Makefile +++ b/tests/Makefile @@ -55,14 +55,12 @@ test: npm-install @until echo new-test | kafkacat -P -b $(KAFKA_BROKER) -t heartbeat; do echo "Kafka not ready. Waiting ..."; sleep 1; done @ (for i in {1..4}; do echo new-test ; done) | kafkacat -P -b $(KAFKA_BROKER) -t heartbeat; @. ../setup-environment.sh && until kafkacat -C -b $(KAFKA_BROKER) -t heartbeat -e -o -4 2> /dev/null | grep -m 1 "dashboard"; do sleep 1 ; done - ifeq ($(TESTING_PLATFORM), docker) @$(call msg,"Resetting database ..."); - @. 
../setup-environment.sh && docker exec -i "$${COMPOSE_PROJECT_NAME}_frontend_1" node /app/admin resetDB - + @. ../setup-environment.sh && docker exec "$${COMPOSE_PROJECT_NAME}_frontend_1" node /app/admin resetDB @$(call msg,"Adding a user for testing ..."); - @. ../setup-environment.sh && docker exec -i "$${COMPOSE_PROJECT_NAME}_frontend_1" node /app/admin addUser $(USERNAME) $(PASSWORD) $(ROLE) > /dev/null - @. ../setup-environment.sh && docker exec -i "$${COMPOSE_PROJECT_NAME}_frontend_1" node /app/admin addUser $(USERNAME2) $(PASSWORD2) $(ROLE2) > /dev/null + @. ../setup-environment.sh && docker exec "$${COMPOSE_PROJECT_NAME}_frontend_1" node /app/admin addUser $(USERNAME) $(PASSWORD) $(ROLE) > /dev/null + @. ../setup-environment.sh && docker exec "$${COMPOSE_PROJECT_NAME}_frontend_1" node /app/admin addUser $(USERNAME2) $(PASSWORD2) $(ROLE2) > /dev/null else @$(call msg,"Resetting database ..."); @kubectl exec -i $(DASHBOARD_POD) -c dashboard -- node /app/admin resetDB From 2c3a47fd12400f2973660a20880d2d533b9c09ad Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Sat, 6 Apr 2019 11:39:06 +0200 Subject: [PATCH 66/68] Build and E2E Test implemented for CircleCi environment Signed-off-by: Marcel Wagner --- .circleci/config.yml | 149 ++++++++++++++++++++++++++++++++++++------- 1 file changed, 126 insertions(+), 23 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f6a399b5..45e42b88 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -2,28 +2,80 @@ # # Check https://circleci.com/docs/2.0/language-javascript/ for more details # -version: 2 -jobs: - build: - docker: - # specify the version you desire here - - image: circleci/node:8.0 - - # Specify service dependencies here if necessary - # CircleCI maintains a library of pre-built images - # documented at https://circleci.com/docs/2.0/circleci-images/ - # - image: circleci/mongo:3.4.4 - +version: 2.1 +executors: + my-executor: + machine: + enabled: true + image: ubuntu-1604:201903-01 working_directory: ~/repo environment: shell: /bin/bash TERM: xterm - + TZ: "Europe/Berlin" +commands: + setup-build-environment: + description: "Setup build Environment" + steps: + - run: + shell: /bin/bash + name: Setup build environment + command: | + cd platform-setup + sudo ./setup-ubuntu16.04.sh + setup-branch: + description: "Setup branch" steps: - - checkout - - run: - name: Check whether most recent commit is signedoff + shell: /bin/bash + name: Setup branch + command: | + if [ "$CIRCLE_BRANCH" = "develop" ]; then + git branch --set-upstream-to=origin/develop develop + make update + fi + build-branch: + description: "Build branch" + steps: + - run: + shell: /bin/bash + name: Build branch + command: | + yes| make build + e2e-test: + description: "E2E test" + steps: + - run: + shell: /bin/bash + name: E2e Test + command: | + yes| make test + push-images: + description: "Push images" + parameters: + tag: + type: string + default: "latest" + steps: + - run: + shell: /bin/bash + name: Push images to docker hub + command: | + docker login -u ${DOCKER_USERNAME} -p ${DOCKER_PASSWORD} + DOCKER_TAG="latest" + if [ "<< parameters.tag >>" = "date" ]; then + DOCKER_TAG=`date -I` + fi + if [ "<< parameters.tag >>" = "tag" ]; then + DOCKER_TAG=`git describe --tag` || exit 1 + fi + echo Now trying to push with Tag ${DOCKER_TAG} + make push + check-signed: + description: "Check whether latest commit is signed" + steps: + - run: + name: Check whether most recent commit is signed shell: /bin/bash command: | MESSAGE=`git log -1 
--pretty=%B` @@ -34,10 +86,61 @@ jobs: echo "Commit is not signedoff" exit 1 fi - - # run tests! - - run: - shell: /bin/bash - name: Run remote build & e2e tests - command: | - ssh -o "StrictHostKeyChecking no" root@ci.streammyiot.com /opt/test/testTest.sh test +jobs: + build: + executor: my-executor + steps: + - checkout + - check-signed + - setup-build-environment + - setup-branch + - build-branch + - e2e-test + build-master: + executor: my-executor + steps: + - checkout + - setup-build-environment + - setup-branch + - build-branch + - e2e-test + - push-images: + tag: "tag" + build-nightly: + executor: my-executor + steps: + - checkout + - setup-build-environment + - setup-branch + - build-branch + - e2e-test + - push-images: + tag: "date" +workflows: + version: 2.1 + workflow: + jobs: + - build: + filters: + branches: + only: + - develop + - build-master: + filters: + branches: + only: + - master + nightly: + triggers: + - schedule: + cron: "0 0 * * *" + filters: + branches: + only: + - develop + jobs: + - build-nightly: + filters: + branches: + only: + - develop From 9f73d42c6bbc84dd53653507bb830afe9aa115bf Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Sat, 6 Apr 2019 22:28:32 +0200 Subject: [PATCH 67/68] Test for docker compose41 Signed-off-by: Marcel Wagner --- .circleci/config.yml | 32 ++++++++++++++++++++++++++++---- 1 file changed, 28 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 45e42b88..7c230d65 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -30,8 +30,32 @@ commands: shell: /bin/bash name: Setup branch command: | - if [ "$CIRCLE_BRANCH" = "develop" ]; then - git branch --set-upstream-to=origin/develop develop + sudo apt install jq + # First find out PR number (CircleCI is not that sophisticated about it) + + if [ ! -z "${CIRCLE_PULL_REQUEST}" ]; then + #echo Pull request active ${CIRCLE_PULL_REQUEST} + PR=${CIRCLE_PR_NUMBER} + PR_REPO=${CIRCLE_PR_REPONAME} + PR_USERNAME=${CIRCLE_PR_USERNAME} + if [ -z "${PR}"]; then + #no forked PR + PR=$(basename ${CIRCLE_PULL_REQUEST}) + PR_REPO=${CIRCLE_PROJECT_REPONAME} + PR_USERNAME=${CIRCLE_PROJECT_USERNAME} + fi + #echo Marcel922 ${PR_REPO} + ${PR} + #echo Marcel352 ${PR_USERNAME} + url="https://api.github.com/repos/${PR_USERNAME}/${PR_REPO}/pulls/${PR}" + BASE_BRANCH=$(curl "$url" | jq '.base.ref' | tr -d '"') + #echo Marcel643 $url + #echo $(curl "$url") + echo Marcel822 ${BASE_BRANCH} + fi + if [ ! "$CIRCLE_BRANCH" = "master" ]; then + git branch --set-upstream-to=origin/${CIRCLE_BRANCH} ${CIRCLE_BRANCH} + fi + if [ "$CIRCLE_BRANCH" = "develop" ] || ["${BASE_BRANCH}" = "develop"]; then make update fi build-branch: @@ -123,8 +147,8 @@ workflows: - build: filters: branches: - only: - - develop + ignore: + - master - build-master: filters: branches: From 52a7c7bccace8610312132ba2ae6ed9e48de84e1 Mon Sep 17 00:00:00 2001 From: Marcel Wagner Date: Sun, 7 Apr 2019 00:43:25 +0200 Subject: [PATCH 68/68] Test for docker compose57 Signed-off-by: Marcel Wagner --- .circleci/config.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 7c230d65..94017758 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -54,8 +54,10 @@ commands: fi if [ ! 
"$CIRCLE_BRANCH" = "master" ]; then git branch --set-upstream-to=origin/${CIRCLE_BRANCH} ${CIRCLE_BRANCH} + git submodule init + git submodule update fi - if [ "$CIRCLE_BRANCH" = "develop" ] || ["${BASE_BRANCH}" = "develop"]; then + if [ "$CIRCLE_BRANCH" = "develop" ] || [ "${BASE_BRANCH}" = "develop" ]; then make update fi build-branch: