diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index 3ca26175..00000000 --- a/Dockerfile +++ /dev/null @@ -1,11 +0,0 @@ -FROM golang:alpine -ADD argo-messaging-linux-static /home/argo/argo-messaging -ADD config.json /home/argo/config.json -ADD host.crt /home/argo/host.crt -ADD host.key /home/argo/host.key -ENV HOME /home/argo -EXPOSE 8080 -WORKDIR /home/argo -# Kafka and Zookeper take some time to boot up. -# We wait for 20'' and then start the service. -CMD sleep 20 && /home/argo/argo-messaging \ No newline at end of file diff --git a/Jenkinsfile b/Jenkinsfile index c2322851..7d921cea 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,7 +1,7 @@ pipeline { agent { docker { - image 'argo.registry:5000/epel-7-mgo1.14' + image 'argo.registry:5000/epel-7-mgo1.15' args '-u jenkins:jenkins' } } @@ -27,7 +27,16 @@ pipeline { ln -sf ${WORKSPACE}/${PROJECT_DIR} ${WORKSPACE}/go/src/github.com/ARGOeu/${PROJECT_DIR} rm -rf ${WORKSPACE}/go/src/github.com/ARGOeu/${PROJECT_DIR}/${PROJECT_DIR} cd ${WORKSPACE}/go/src/github.com/ARGOeu/${PROJECT_DIR} - go build + export CGO_CFLAGS"=-O2 -fstack-protector --param=ssp-buffer-size=4 -D_FORTIFY_SOURCE=2" + go build -buildmode=pie -ldflags "-s -w -linkmode=external -extldflags '-z relro -z now'" + """ + } + } + stage('Security Tests') { + steps { + sh """ + cd ${WORKSPACE}/go/src/github.com/ARGOeu/${PROJECT_DIR} + /home/jenkins/checksec.py -b ./argo-messaging """ } } @@ -91,7 +100,6 @@ pipeline { success { script{ if ( env.BRANCH_NAME == 'devel' ) { - build job: '/ARGO-utils/argo-swagger-docs', propagate: false build job: '/ARGO/argodoc/devel', propagate: false } else if ( env.BRANCH_NAME == 'master' ) { build job: '/ARGO/argodoc/master', propagate: false diff --git a/Makefile b/Makefile index bc8f2582..3e1ccdc6 100644 --- a/Makefile +++ b/Makefile @@ -18,7 +18,8 @@ go-build-linux-static: mkdir -p ${GOPATH}/src/github.com/ARGOeu/argo-messaging cp -R . 
${GOPATH}/src/github.com/ARGOeu/argo-messaging cd ${GOPATH}/src/github.com/ARGOeu/argo-messaging && \ - CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o ${APPDIR}/argo-messaging-linux-static . &&\ + export CGO_CFLAGS"=-O2 -fstack-protector --param=ssp-buffer-size=4 -D_FORTIFY_SOURCE=2" + GOOS=linux go build -buildmode=pie -ldflags "-s -w -linkmode=external -extldflags '-z relro -z now'" -a -installsuffix cgo -o ${APPDIR}/argo-messaging-linux-static . &&\ chown ${hostUID} ${APPDIR}/argo-messaging-linux-static go-test: diff --git a/README.md b/README.md index 51da1a34..2b2d5546 100644 --- a/README.md +++ b/README.md @@ -1,123 +1,112 @@ -[![Build Status](https://travis-ci.org/ARGOeu/argo-messaging.svg?branch=devel)](https://travis-ci.org/ARGOeu/argo-messaging) # ARGO Messaging -> ## :warning: Warning :warning: -> These installation instructions are meant for running the service for demo purposes. If you want to operate the service for anything else other than a simple demo, please implement a deployment model that meets your requirements. - -In order to build, test and run the service, recent versions of the docker-engine (>=1.12) and the docker-compose (>= 1.8.0) are required. Step 1 refers to the docker installation on Ubuntu 16.04.1, please adopt accordingly your Linux distribution or OS. - -## Install docker from dockerproject.org (Ubuntu 16.04.1) - -```shell -$ sudo apt-key adv --keyserver hkp://pool.sks-keyservers.net:80 --recv-keys 58118E89F3A912897C070ADBF76221572C52609D -$ echo "deb https://apt.dockerproject.org/repo ubuntu-xenial main" | sudo tee /etc/apt/sources.list.d/docker.list -$ sudo apt-get update -$ sudo apt-cache policy docker-engine -$ sudo apt-get install linux-image-extra-$(uname -r) linux-image-extra-virtual -$ sudo apt-get install docker-engine +## Description +The ARGO Messaging Service is a Publish/Subscribe Service, +which implements the Google PubSub protocol. 
+Instead of focusing on a single Messaging API specification +for handling the logic of publishing/subscribing +to the broker network the API focuses +on creating nodes of Publishers and Subscribers as a Service. +It provides an HTTP API that enables Users/Systems to implement +message oriented services using the Publish/Subscribe Model over plain HTTP. +In the Publish/Subscribe paradigm, Publishers are users/systems +that can send messages to +named-channels called Topics. Subscribers are users/systems that +create Subscriptions to +specific topics and receive messages. + +## Prerequisites + +#### Build Requirements + + - Golang 1.15 + +#### Datastore Requirements + - The service has been tested with mongodb from version `3.2.22` up to `4.2.3`. + +#### Broker requirements + + - Kafka 2.2.1 + - Zookeeper 3.4.5 + +#### Push Server +In order to support push enabled subscriptions AMS relies on an external service +that handles the actual pushing of messages, while AMS holds the configuration +for the subscriptions. You can create push enabled subscriptions even +when the push-server isn't available, they will be picked up automatically +when the push-server is up and running. +- [Push server](https://github.com/ARGOeu/ams-push-server) + + +## Configuration + +#### Configuration Location +Configuration for the service takes place inside a `config.json` file, that +resides in two possible locations: + +1) Same folder as the binary + +2) `/etc/argo-messaging/config.json` + +#### Configuration values + +- `port` - port the service will bind to +- `zookeeper_hosts` - list of zookeeper hosts, e.g. [zoo1:2181,zoo2:2181,zoo3:2181] +- `store_host` - store host, e.g. 
'mongo1:27017,mongo2:27017,mongo3:27017' +- `store_db` - mongo db database name +- `certificate` - /path/to/tls/certificate +- `certificate_key` - /path/to/cert/key +- `certificate_authorities_dir` - dir containing CAs +- `log_level` - DEBUG,INFO,WARNING, ERROR or FATAL +- `push_enabled` - (true|false) whether or not the service will support push enabled subscriptions +- `push_tls_enabled` - (true|false), whether or not the service will communicate over TLS with the push server +- `push_server_host` - push1.grnet.gr +- `push_server_port` - 443 +- `verify_push_server` - (true|false) mutual TLS for the push server +- `push_worker_token` - token for the active push worker user +- `log_facilities` - ["syslog", "console"] +- `auth_option`: (`key`|`header`|`both`), where should the service look for the access token. + + +#### Build & Run the service + +In order to build the service, inside the AMS repo issue the command: +```bash +go build ``` - -We advise you to follow the steps described in docker manual. For Ubuntu: - -- Prerequisites : https://docs.docker.com/engine/installation/linux/ubuntulinux/#prerequisites -- Install : https://docs.docker.com/engine/installation/linux/ubuntulinux/#install -- Add a docker group [https://docs.docker.com/engine/installation/linux/ubuntulinux/#/create-a-docker-group] . - -**Note:** Don't forget to login logout before running the docker as a non root user. This ensures your user is running with the correct permissions. - -## Install docker-compose - -We are using version of the Compose file format. 
To install the latest docker-compose, follow the guidelines here: https://github.com/docker/compose/releases - -## Clone the argo-messaging repository - -```shell -$ git clone https://github.com/ARGOeu/argo-messaging +In order to run the service, +```bash +./argo-messaging ``` -## Get certificates (skip this step if you already have certificates) +## X509 Authentication +Although AMS doesn't support direct authentication through an x509 certificate, +you can use the [argo-authentication-service](https://github.com/ARGOeu/argo-api-authn) +to map an x509 certificate to an AMS `key`. +The service will also validate the certificate. +The [ams-library](https://github.com/ARGOeu/argo-ams-library) will effortlessly +hide this complexity if you decide to use it in order to access AMS. -The ARGO Messaging services requires certificates in order to operates. The easiest way is to get certificates from letsencrypt. You can follow the instructions from the letsencrypt website or use the docker letsencrypt docker image. One caveat of this approach is that the certificate files end up in the ```etc/live``` directory (see below) and will be owned by the root user. 
+## Managing the protocol buffers and gRPC definitions -```shell -$ mkdir -p ${HOME}/letsencrypt/{etc,var} -$ docker run -it --rm -p 443:443 -p 80:80 --name certbot \ - -v "$HOME/letsencrypt/etc:/etc/letsencrypt" \ - -v "$HOME/letsencrypt/var:/var/lib/letsencrypt" \ - quay.io/letsencrypt/letsencrypt:latest certonly -$ cd argo-messaging -# Comment: Please change owneship of ${HOME}/letsencrypt to your user -$ cp ${HOME}/letsencrypt/etc/live/*/fullchain.pem host.crt -$ sudo cp ${HOME}/letsencrypt/etc/live/*/privkey.pem host.key -``` -## Edit the default configuration file (config.json) - -In the ```argo-messaging``` directory, edit ```config.json```: - -```diff -{ -"bind_ip":"", -"port":8080, -- "zookeeper_hosts":["localhost"], -- "store_host":"localhost", -+ "zookeeper_hosts":["zookeeper"], -+ "store_host":"mongo", -"store_db":"argo_msg", -- "certificate":"/etc/pki/tls/certs/localhost.crt", -- "certificate_key":"/etc/pki/tls/private/localhost.key", -+ "certificate":"./host.crt", -+ "certificate_key":"./host.key", -"service_token":"CHANGE-THIS-TO-A-LONG-STRING", -"push_enabled": false -} -``` - -**Note:** Make sure that you change the service_token to a long string. +In order to modify any `.proto` file you will need the following -## Edit docker-compose.yml + - Read on how to install the protoc compiler on your platform [here.](https://github.com/protocolbuffers/protobuf) -In the ```argo-messaging``` directory, edit ```docker-compose.yml``` and add the public IP address of your host to the ```KAFKA_ADVERTISED_HOST_NAME``` key. + - Install the go plugin. `go get -u github.com/golang/protobuf/protoc-gen-go` -## Run the tests + - install the go gRPC package. `go get -u google.golang.org/grpc` -```shell -$ docker run --env hostUID=`id -u`:`id -g` --rm -v "$PWD":/usr/src/myapp -w /usr/src/myapp golang:1.7 make go-test -``` + - Inside `push/grpc` compile. 
`protoc -I proto/ proto/ams.proto --go_out=plugins=grpc:proto` -## Build the service +## Helpful utilities -```shell -$ docker run --env hostUID=`id -u`:`id -g` --rm -v "$PWD":/usr/src/myapp -w /usr/src/myapp golang:1.7 make go-build-linux-static -``` - -## Start the service - -```shell -$ docker-compose build -$ docker-compose up -d -``` - -## Test that the service is running - -```shell -$ curl https:///v1/projects?key= -``` - -**Note:** Change `````` to the hostname of your host and `````` to the service token that you have added in ```config.json```. You should get an empty json response: - -```shell -{} -``` - -## Stop the service - -```shell -$ docker-compose stop -``` +Inside the [tools](https://github.com/ARGOeu/argo-messaging/tree/master/tools) folder you can find various scripts that can help you +perform common tasks OR help you get started with interacting with AMS. -## Congratulations! +There is also a handy python [library](https://github.com/ARGOeu/argo-ams-library) +for interacting with AMS. -Please visit http://argoeu.github.io/messaging/v1/ to learn how to use the service. 
## Credits diff --git a/argo-messaging.spec b/argo-messaging.spec index f966eb2c..4d361750 100644 --- a/argo-messaging.spec +++ b/argo-messaging.spec @@ -3,13 +3,12 @@ Name: argo-messaging Summary: ARGO Messaging API for broker network -Version: 1.0.7 +Version: 1.0.8 Release: 1%{?dist} License: ASL 2.0 Buildroot: %{_tmppath}/%{name}-buildroot Group: Unspecified Source0: %{name}-%{version}.tar.gz -BuildRequires: golang BuildRequires: git Requires(pre): /usr/sbin/useradd, /usr/bin/getent ExcludeArch: i386 @@ -31,7 +30,8 @@ export PATH=$PATH:$GOPATH/bin cd src/github.com/ARGOeu/argo-messaging/ export GIT_COMMIT=$(git rev-list -1 HEAD) export BUILD_TIME=$(date -u +'%Y-%m-%dT%H:%M:%SZ') -go install -ldflags "-X github.com/ARGOeu/argo-messaging/version.Commit=$GIT_COMMIT -X github.com/ARGOeu/argo-messaging/version.BuildTime=$BUILD_TIME" +export CGO_CFLAGS"=-O2 -fstack-protector --param=ssp-buffer-size=4 -D_FORTIFY_SOURCE=2" +go install -buildmode=pie -ldflags "-s -w -linkmode=external -extldflags '-z relro -z now' -X github.com/ARGOeu/argo-messaging/version.Commit=$GIT_COMMIT -X github.com/ARGOeu/argo-messaging/version.BuildTime=$BUILD_TIME" %install %{__rm} -rf %{buildroot} @@ -63,6 +63,8 @@ go clean %attr(0644,root,root) /usr/lib/systemd/system/argo-messaging.service %changelog +* Wed Mar 31 2021 Agelos Tsalapatis 1.0.8-1%{?dist} +- AMS release 1.0.8 * Wed Aug 05 2020 Agelos Tsalapatis 1.0.7-1%{?dist} - AMS release 1.0.7 * Wed Jan 08 2020 Agelos Tsalapatis 1.0.6-1%{?dist} diff --git a/auth/auth_test.go b/auth/auth_test.go index 29e06d74..3a273748 100644 --- a/auth/auth_test.go +++ b/auth/auth_test.go @@ -121,6 +121,10 @@ func (suite *AuthTestSuite) TestAuth() { suite.Equal(true, IsPushWorker([]string{"push_worker", "publisher"})) suite.Equal(false, IsPushWorker([]string{"publisher"})) + suite.Equal(true, IsAdminViewer([]string{"admin_viewer"})) + suite.Equal(true, IsAdminViewer([]string{"admin_viewer", "service_admin"})) + suite.Equal(false, 
IsAdminViewer([]string{"publisher"})) + // Check ValidUsers mechanism v, err := AreValidUsers("ARGO", []string{"UserA", "foo", "bar"}, store) suite.Equal(false, v) @@ -511,7 +515,17 @@ func (suite *AuthTestSuite) TestAuth() { var qUsers1 []User qUsers1 = append(qUsers1, User{"uuid8", []ProjectRoles{{"ARGO2", []string{"consumer", "publisher"}, []string{}, []string{}}}, "UserZ", "", "", "", "", "S3CR3T1", "foo-email", []string{}, created, modified, ""}) - qUsers1 = append(qUsers1, User{"uuid7", []ProjectRoles{}, "push_worker_0", "", "", "", "", "push_token", "foo-email", []string{"push_worker"}, created, modified, ""}) + qUsers1 = append(qUsers1, User{ + UUID: "uuid7", + Name: "push_worker_0", + FirstName: "", + LastName: "", + Description: "", + Token: "push_token", + Email: "foo-email", + ServiceRoles: []string{"push_worker"}, CreatedOn: created, ModifiedOn: modified, + CreatedBy: "", + }) qUsers1 = append(qUsers1, User{"same_uuid", []ProjectRoles{{"ARGO", []string{"publisher", "consumer"}, []string{}, []string{}}}, "UserSame2", "", "", "", "", "S3CR3T42", "foo-email", []string{}, created, modified, "UserA"}) qUsers1 = append(qUsers1, User{"same_uuid", []ProjectRoles{{"ARGO", []string{"publisher", "consumer"}, []string{}, []string{}}}, "UserSame1", "", "", "", "", "S3CR3T41", "foo-email", []string{}, created, modified, "UserA"}) qUsers1 = append(qUsers1, User{"uuid4", []ProjectRoles{{"ARGO", []string{"publisher", "consumer"}, []string{"topic2"}, []string{"sub3", "sub4"}}}, "UserZ", "", "", "", "", "S3CR3T4", "foo-email", []string{}, created, modified, "UserA"}) @@ -520,29 +534,39 @@ func (suite *AuthTestSuite) TestAuth() { qUsers1 = append(qUsers1, User{"uuid1", []ProjectRoles{{"ARGO", []string{"consumer", "publisher"}, []string{"topic1", "topic2"}, []string{"sub1", "sub2", "sub3"}}}, "UserA", "FirstA", "LastA", "OrgA", "DescA", "S3CR3T1", "foo-email", []string{}, created, modified, ""}) qUsers1 = append(qUsers1, User{"uuid0", []ProjectRoles{{"ARGO", 
[]string{"consumer", "publisher"}, []string{}, []string{}}}, "Test", "", "", "", "", "S3CR3T", "Test@test.com", []string{}, created, modified, ""}) // return all users - pu1, e1 := PaginatedFindUsers("", 0, "", true, store2) + pu1, e1 := PaginatedFindUsers("", 0, "", true, true, store2) var qUsers2 []User qUsers2 = append(qUsers2, User{"uuid8", []ProjectRoles{{"ARGO2", []string{"consumer", "publisher"}, []string{}, []string{}}}, "UserZ", "", "", "", "", "S3CR3T1", "foo-email", []string{}, created, modified, ""}) - qUsers2 = append(qUsers2, User{"uuid7", []ProjectRoles{}, "push_worker_0", "", "", "", "", "push_token", "foo-email", []string{"push_worker"}, created, modified, ""}) + qUsers2 = append(qUsers2, User{ + UUID: "uuid7", + Name: "push_worker_0", + FirstName: "", + LastName: "", + Description: "", + Token: "push_token", + Email: "foo-email", + ServiceRoles: []string{"push_worker"}, CreatedOn: created, ModifiedOn: modified, + CreatedBy: "", + }) qUsers2 = append(qUsers2, User{"same_uuid", []ProjectRoles{{"ARGO", []string{"publisher", "consumer"}, []string{}, []string{}}}, "UserSame2", "", "", "", "", "S3CR3T42", "foo-email", []string{}, created, modified, "UserA"}) // return the first page with 2 users - pu2, e2 := PaginatedFindUsers("", 3, "", true, store2) + pu2, e2 := PaginatedFindUsers("", 3, "", true, true, store2) var qUsers3 []User qUsers3 = append(qUsers3, User{"uuid4", []ProjectRoles{{"ARGO", []string{"publisher", "consumer"}, []string{"topic2"}, []string{"sub3", "sub4"}}}, "UserZ", "", "", "", "", "S3CR3T4", "foo-email", []string{}, created, modified, "UserA"}) qUsers3 = append(qUsers3, User{"uuid3", []ProjectRoles{{"ARGO", []string{"publisher", "consumer"}, []string{"topic3"}, []string{"sub2"}}}, "UserX", "", "", "", "", "S3CR3T3", "foo-email", []string{}, created, modified, "UserA"}) // return the next 2 users - pu3, e3 := PaginatedFindUsers("NA==", 2, "", true, store2) + pu3, e3 := PaginatedFindUsers("NA==", 2, "", true, true, store2) // empty 
collection store3 := stores.NewMockStore("", "") store3.UserList = []stores.QUser{} - pu4, e4 := PaginatedFindUsers("", 0, "", true, store3) + pu4, e4 := PaginatedFindUsers("", 0, "", true, true, store3) // invalid id - _, e5 := PaginatedFindUsers("invalid", 0, "", true, store2) + _, e5 := PaginatedFindUsers("invalid", 0, "", true, true, store2) // check user list by project var qUsersB []User @@ -552,7 +576,26 @@ func (suite *AuthTestSuite) TestAuth() { var qUsersC []User qUsersC = append(qUsersC, User{"uuid8", []ProjectRoles{{"ARGO2", []string{"consumer", "publisher"}, []string{}, []string{}}}, "UserZ", "", "", "", "", "", "foo-email", []string{}, created, modified, ""}) - puC, e1 := PaginatedFindUsers("", 1, "argo_uuid2", false, store2) + // check for non detailed view + var ndUser []User + ndUser = append(ndUser, User{ + UUID: "uuid8", + Name: "UserZ", + FirstName: "", + LastName: "", + Organization: "", + Description: "", + Token: "S3CR3T1", + Email: "foo-email", + ServiceRoles: []string{}, + CreatedOn: created, + ModifiedOn: modified, + CreatedBy: ""}) + + ndu, _ := PaginatedFindUsers("", 1, "", true, false, store2) + suite.Equal(ndUser, ndu.Users) + + puC, e1 := PaginatedFindUsers("", 1, "argo_uuid2", false, true, store2) suite.Equal(qUsersC, puC.Users) suite.Equal(int32(1), puC.TotalSize) suite.Equal("", puC.NextPageToken) @@ -563,12 +606,12 @@ func (suite *AuthTestSuite) TestAuth() { suite.Nil(e1) suite.Equal(qUsers2, pu2.Users) - suite.Equal(int32(9), pu2.TotalSize) + suite.Equal(int32(3), pu2.TotalSize) suite.Equal("NQ==", pu2.NextPageToken) suite.Nil(e2) suite.Equal(qUsers3, pu3.Users) - suite.Equal(int32(9), pu3.TotalSize) + suite.Equal(int32(2), pu3.TotalSize) suite.Equal("Mg==", pu3.NextPageToken) suite.Nil(e3) diff --git a/auth/users.go b/auth/users.go index 6625e14c..ccbce39e 100644 --- a/auth/users.go +++ b/auth/users.go @@ -2,7 +2,7 @@ package auth import ( "crypto/rand" - "crypto/sha1" + "crypto/sha256" "encoding/base64" "encoding/hex" 
"encoding/json" @@ -24,7 +24,7 @@ const ( // User is the struct that holds user information type User struct { UUID string `json:"uuid"` - Projects []ProjectRoles `json:"projects"` + Projects []ProjectRoles `json:"projects,omitempty"` Name string `json:"name"` FirstName string `json:"first_name,omitempty"` LastName string `json:"last_name,omitempty"` @@ -379,7 +379,7 @@ func FindUsers(projectUUID string, uuid string, name string, priviledged bool, s } // PaginatedFindUsers returns a page of users -func PaginatedFindUsers(pageToken string, pageSize int32, projectUUID string, priviledged bool, store stores.Store) (PaginatedUsers, error) { +func PaginatedFindUsers(pageToken string, pageSize int32, projectUUID string, privileged, detailedView bool, store stores.Store) (PaginatedUsers, error) { var totalSize int32 var nextPageToken string @@ -406,7 +406,7 @@ func PaginatedFindUsers(pageToken string, pageSize int32, projectUUID string, pr token := "" usernameC := "" // if call made by priviledged user (superuser), show service roles, token and user creator info - if priviledged { + if privileged { if item.CreatedBy != "" { usr, err := store.QueryUsers("", item.CreatedBy, "") if err == nil && len(usr) > 0 { @@ -418,28 +418,32 @@ func PaginatedFindUsers(pageToken string, pageSize int32, projectUUID string, pr serviceRoles = item.ServiceRoles } - pRoles := []ProjectRoles{} - for _, pItem := range item.Projects { - // if user not priviledged (not superuser) and queried projectUUID doesn't - // match current role item's project UUID, skip the item - if !priviledged && pItem.ProjectUUID != projectUUID { - continue - } - prName := projects.GetNameByUUID(pItem.ProjectUUID, store) + var pRoles []ProjectRoles - // Get User topics and subscriptions - topicList, _ := store.QueryTopicsByACL(pItem.ProjectUUID, item.UUID) - topicNames := []string{} - for _, tpItem := range topicList { - topicNames = append(topicNames, tpItem.Name) - } + if detailedView { - subList, _ := 
store.QuerySubsByACL(pItem.ProjectUUID, item.UUID) - subNames := []string{} - for _, sbItem := range subList { - subNames = append(subNames, sbItem.Name) + for _, pItem := range item.Projects { + // if user not priviledged (not superuser) and queried projectUUID doesn't + // match current role item's project UUID, skip the item + if !privileged && pItem.ProjectUUID != projectUUID { + continue + } + prName := projects.GetNameByUUID(pItem.ProjectUUID, store) + + // Get User topics and subscriptions + topicList, _ := store.QueryTopicsByACL(pItem.ProjectUUID, item.UUID) + topicNames := []string{} + for _, tpItem := range topicList { + topicNames = append(topicNames, tpItem.Name) + } + + subList, _ := store.QuerySubsByACL(pItem.ProjectUUID, item.UUID) + subNames := []string{} + for _, sbItem := range subList { + subNames = append(subNames, sbItem.Name) + } + pRoles = append(pRoles, ProjectRoles{Project: prName, Roles: pItem.Roles, Topics: topicNames, Subs: subNames}) } - pRoles = append(pRoles, ProjectRoles{Project: prName, Roles: pItem.Roles, Topics: topicNames, Subs: subNames}) } curUser := NewUser(item.UUID, pRoles, item.Name, item.FirstName, item.LastName, @@ -458,9 +462,7 @@ func PaginatedFindUsers(pageToken string, pageSize int32, projectUUID string, pr // Authenticate based on token func Authenticate(projectUUID string, token string, store stores.Store) ([]string, string) { - roles, user := store.GetUserRoles(projectUUID, token) - - return roles, user + return store.GetUserRoles(projectUUID, token) } // ExistsWithName returns true if a user with name exists @@ -742,7 +744,7 @@ func GenToken() (string, error) { if _, err := rand.Read(tokenBytes); err != nil { return "", err } - sha1Bytes := sha1.Sum(tokenBytes) + sha1Bytes := sha256.Sum256(tokenBytes) return hex.EncodeToString(sha1Bytes[:]), nil } @@ -801,6 +803,17 @@ func IsServiceAdmin(roles []string) bool { return false } +// IsAdminViewer checks if the user is an admon viewer +func IsAdminViewer(roles 
[]string) bool { + for _, role := range roles { + if role == "admin_viewer" { + return true + } + } + + return false +} + // RemoveUser removes an existing user func RemoveUser(uuid string, store stores.Store) error { return store.RemoveUser(uuid) diff --git a/brokers/kafka.go b/brokers/kafka.go index 8d9315f2..77fd9536 100644 --- a/brokers/kafka.go +++ b/brokers/kafka.go @@ -378,8 +378,8 @@ ConsumerLoop: "backend_service": "kafka", "topic": topic, "message": msg, - "consumed": string(consumed), - "max": string(max), + "consumed": consumed, + "max": max, }, ).Debug("Consumed message") diff --git a/config.json b/config.json index 41a305fc..b95282ba 100644 --- a/config.json +++ b/config.json @@ -11,10 +11,11 @@ "service_token":"b328c3861f061f87cbd34cf34f36ba2ae20883a5", "log_level":"INFO", "push_enabled":true, - "push_tls_enabled": true, + "push_tls_enabled": false, "push_server_host": "localhost", "push_server_port": 5555, "verify_push_server": true, - "push_worker_token": "18492e78a1e95be564c00ba4537c4171ec628f98", - "log_facilities": ["console"] + "push_worker_token": "8c8cbaeba3e1317c18cd5c03f3b1f596c23f922a", + "log_facilities": ["console"], + "auth_option": "both" } diff --git a/config/config.go b/config/config.go index 5a4f49ee..790af871 100644 --- a/config/config.go +++ b/config/config.go @@ -21,6 +21,27 @@ import ( "strings" ) +// AuthOption defines how the service will handle authentication/authorization +// KEY, HEADER or BOTH are the available values for where the auth token should reside +type AuthOption int + +const ( + // the api key can reside in the url parameter 'key' + // maps to config value 'key' + UrlKey = iota + 1 + // the api key can reside in the header 'x-api-key' + // maps to config value 'header' + HeaderKey + // the api key can reside in either of the two + // maps to config value 'both' + URLKeyAndHeaderKey +) + +// String representation of the iota auth option +func (a AuthOption) String() string { + return [...]string{"key", "header", 
"both"}[a-1] +} + // APICfg holds kafka configuration type APICfg struct { // values @@ -49,6 +70,9 @@ type APICfg struct { PushWorkerToken string // Logging output(console,file,syslog etc) LogFacilities []string + // AuthOption defines how the service will handle authentication/authorization + // KEY, HEADER or BOTH are the available values for where the auth token should reside + authOption AuthOption } // NewAPICfg creates a new kafka configuration object @@ -259,6 +283,26 @@ func setLogFacilities(facilities []string) { } } +// setAuthOption determines which auth option should be used +func (cfg *APICfg) setAuthOption(authOpt string) { + + switch strings.ToLower(authOpt) { + case "both": + cfg.authOption = URLKeyAndHeaderKey + break + case "header": + cfg.authOption = HeaderKey + break + default: + cfg.authOption = UrlKey + } +} + +// AuthOption returns the value of the config for auth_option +func (cfg *APICfg) AuthOption() AuthOption { + return cfg.authOption +} + // LoadTest the configuration func (cfg *APICfg) LoadTest() { @@ -292,6 +336,12 @@ func (cfg *APICfg) LoadTest() { setLogFacilities(cfg.LogFacilities) // Then load rest of the parameters + cfg.setAuthOption(viper.GetString("auth_option")) + log.WithFields( + log.Fields{ + "type": "service_log", + }, + ).Infof("Parameter Loaded - auth_option: %v", cfg.AuthOption()) // bind ip cfg.BindIP = viper.GetString("bind_ip") @@ -379,7 +429,7 @@ func (cfg *APICfg) LoadTest() { log.Fields{ "type": "service_log", }, - ).Infof("Parameter Loaded - service_token: %v", cfg.ServiceToken) + ).Info("Parameter Loaded - service_token") // push enabled true or false cfg.PushEnabled = viper.GetBool("push_enabled") @@ -387,7 +437,7 @@ func (cfg *APICfg) LoadTest() { log.Fields{ "type": "service_log", }, - ).Infof("Parameter Loaded - push_enabled: %v", cfg.ServiceToken) + ).Infof("Parameter Loaded - push_enabled: %v", cfg.PushEnabled) // push TLS enabled true or false cfg.PushTlsEnabled = viper.GetBool("push_tls_enabled") @@ 
-427,7 +477,7 @@ func (cfg *APICfg) LoadTest() { log.Fields{ "type": "service_log", }, - ).Infof("Parameter Loaded - push_worker_token: %v", cfg.PushWorkerToken) + ).Info("Parameter Loaded - push_worker_token") } // Load the configuration @@ -494,6 +544,9 @@ func (cfg *APICfg) Load() { pflag.String("log-facilities", "", "logging output(s)") viper.BindPFlag("log_facilities", pflag.Lookup("log-facilities")) + pflag.String("auth-option", "", "where the auth token should reside") + viper.BindPFlag("auth_option", pflag.Lookup("auth-option")) + configPath = pflag.String("config-dir", "", "directory path to an alternative json config file") pflag.Parse() @@ -532,6 +585,13 @@ func (cfg *APICfg) Load() { // Then load rest of the parameters + cfg.setAuthOption(viper.GetString("auth_option")) + log.WithFields( + log.Fields{ + "type": "service_log", + }, + ).Infof("Parameter Loaded - auth_option: %v", cfg.AuthOption()) + // bind ip cfg.BindIP = viper.GetString("bind_ip") log.WithFields( @@ -618,7 +678,7 @@ func (cfg *APICfg) Load() { log.Fields{ "type": "service_log", }, - ).Infof("Parameter Loaded - service_token: %v", cfg.ServiceToken) + ).Info("Parameter Loaded - service_token") // push enabled true or false cfg.PushEnabled = viper.GetBool("push_enabled") @@ -626,7 +686,7 @@ func (cfg *APICfg) Load() { log.Fields{ "type": "service_log", }, - ).Infof("Parameter Loaded - push_enabled: %v", cfg.ServiceToken) + ).Infof("Parameter Loaded - push_enabled: %v", cfg.PushEnabled) // push TLS enabled true or false cfg.PushTlsEnabled = viper.GetBool("push_tls_enabled") @@ -666,7 +726,7 @@ func (cfg *APICfg) Load() { log.Fields{ "type": "service_log", }, - ).Infof("Parameter Loaded - push_worker_token: %v", cfg.PushWorkerToken) + ).Info("Parameter Loaded - push_worker_token") } @@ -761,7 +821,7 @@ func (cfg *APICfg) LoadStrJSON(input string) { log.Fields{ "type": "service_log", }, - ).Infof("Parameter Loaded - service_token: %v", cfg.ServiceToken) + ).Info("Parameter Loaded - 
service_token:") // push enabled true or false cfg.PushEnabled = viper.GetBool("push_enabled") @@ -769,7 +829,7 @@ func (cfg *APICfg) LoadStrJSON(input string) { log.Fields{ "type": "service_log", }, - ).Infof("Parameter Loaded - push_enabled: %v", cfg.ServiceToken) + ).Infof("Parameter Loaded - push_enabled: %v", cfg.PushEnabled) // push TLS enabled true or false cfg.PushTlsEnabled = viper.GetBool("push_tls_enabled") @@ -809,7 +869,7 @@ func (cfg *APICfg) LoadStrJSON(input string) { log.Fields{ "type": "service_log", }, - ).Infof("Parameter Loaded - push_worker_token: %v", cfg.PushWorkerToken) + ).Info("Parameter Loaded - push_worker_token") cfg.LogFacilities = viper.GetStringSlice("log_facilities") log.WithFields( @@ -817,4 +877,12 @@ func (cfg *APICfg) LoadStrJSON(input string) { "type": "service_log", }, ).Infof("Parameter Loaded - log_facilities: %v", cfg.LogFacilities) + + // auth option + cfg.setAuthOption(viper.GetString("auth_option")) + log.WithFields( + log.Fields{ + "type": "service_log", + }, + ).Infof("Parameter Loaded - auth_option: %v", cfg.AuthOption()) } diff --git a/config/config.json b/config/config.json index 0259ea83..d99643b1 100644 --- a/config/config.json +++ b/config/config.json @@ -15,6 +15,7 @@ "push_server_host": "localhost", "push_server_port": 5555, "verify_push_server": true, - "push_worker_token": "lol", - "log_facilities": [] + "push_worker_token": "pw-token", + "log_facilities": [], + "auth_option": "header" } diff --git a/config/config_test.go b/config/config_test.go index 9c099bd3..b46ba81a 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -31,10 +31,10 @@ func (suite *ConfigTestSuite) SetupTest() { "push_server_host": "localhost", "push_server_port": 5555, "verify_push_server": "true", - "log_facilities": ["SYSLOG", "CONSOLE"] + "push_worker_token": "pw-token", + "log_facilities": ["SYSLOG", "CONSOLE"], + "auth_option": "header" }` - - log.SetOutput(ioutil.Discard) } func (suite *ConfigTestSuite) 
TestLoadConfiguration() { @@ -49,7 +49,6 @@ func (suite *ConfigTestSuite) TestLoadConfiguration() { // test "LOADTEST" param APIcfg2 := NewAPICfg("LOADTEST") - log.Infof("\n\n %+v \n\n", APIcfg2) suite.Equal([]string{"localhost"}, APIcfg2.ZooHosts) suite.Equal("", APIcfg2.KafkaZnode) suite.Equal("localhost", APIcfg2.StoreHost) @@ -64,6 +63,7 @@ func (suite *ConfigTestSuite) TestLoadConfiguration() { suite.True(APIcfg2.PushTlsEnabled) suite.Equal("localhost", APIcfg2.PushServerHost) suite.Equal(5555, APIcfg2.PushServerPort) + suite.Equal("pw-token", APIcfg2.PushWorkerToken) suite.True(APIcfg2.VerifyPushServer) suite.Equal(0, len(APIcfg2.LogFacilities)) } @@ -85,9 +85,41 @@ func (suite *ConfigTestSuite) TestLoadStringJSON() { suite.Equal("localhost", APIcfg.PushServerHost) suite.Equal(5555, APIcfg.PushServerPort) suite.True(APIcfg.VerifyPushServer) + suite.Equal("pw-token", APIcfg.PushWorkerToken) suite.Equal([]string{"SYSLOG", "CONSOLE"}, APIcfg.LogFacilities) + suite.Equal(HeaderKey, int(APIcfg.AuthOption())) +} + +func (suite *ConfigTestSuite) TestSetAuthOption() { + cfg := APICfg{} + + cfg.setAuthOption("bOth") + suite.Equal(URLKeyAndHeaderKey, int(cfg.authOption)) + + cfg.setAuthOption("KEY") + suite.Equal(UrlKey, int(cfg.authOption)) + + cfg.setAuthOption("header") + suite.Equal(HeaderKey, int(cfg.authOption)) + + cfg.authOption = 0 + cfg.setAuthOption("") + suite.Equal(UrlKey, int(cfg.authOption)) +} + +func (suite *ConfigTestSuite) TestAuthOption() { + + a1 := AuthOption(UrlKey) + suite.Equal("key", a1.String()) + + a2 := AuthOption(HeaderKey) + suite.Equal("header", a2.String()) + + a3 := AuthOption(URLKeyAndHeaderKey) + suite.Equal("both", a3.String()) } func TestConfigTestSuite(t *testing.T) { + log.SetOutput(ioutil.Discard) suite.Run(t, new(ConfigTestSuite)) } diff --git a/doc/swagger/swagger.yaml b/doc/swagger/swagger.yaml index 36e60d84..0ef9e35b 100644 --- a/doc/swagger/swagger.yaml +++ b/doc/swagger/swagger.yaml @@ -9,6 +9,15 @@ info: url: 
http://argoeu.github.io/ email: argo-dev@lists.grnet.gr +securityDefinitions: + APIKeyHeader: + type: apiKey + in: header + name: x-api-key +security: + - APIKeyHeader: [] + + host: localhost schemes: - https @@ -227,7 +236,7 @@ paths: description: Schema information required: true schema: - type: object + type: object tags: - Schemas responses: @@ -272,12 +281,12 @@ paths: description: Schema information required: true schema: - type: object - properties: - schema: - type: object - type: - type: string + type: object + properties: + schema: + type: object + type: + type: string tags: - Schemas responses: @@ -351,14 +360,14 @@ paths: description: Update one or all of the fields of a schema required: true schema: - type: object - properties: - schema: - type: object - name: - type: string - type: - type: string + type: object + properties: + schema: + type: object + name: + type: string + type: + type: string tags: - Schemas responses: @@ -444,11 +453,14 @@ paths: /status: get: - summary: List operational metrics of the ams service + summary: List the health status for ams and push server description: | - list operational metrics + list health status + parameters: + - $ref: '#/parameters/Details' + - $ref: '#/parameters/ApiKeyDetails' tags: - - Operational Metrics + - Health Check responses: 200: description: Returns the health status @@ -462,9 +474,9 @@ paths: $ref: "#/responses/500" - /metrics/daily-message-average: + /metrics/va_metrics: get: - summary: List of projects and their message count for the given time window + summary: List of projects and their message count, users, topics, subs for the given time window description: | list operational metrics parameters: @@ -476,9 +488,9 @@ paths: - Operational Metrics responses: 200: - description: Returns the message count metrics per project + description: Returns the message count metrics per project, and overall users, subs and topics schema: - $ref: '#/definitions/TotalProjectMessageCount' + $ref: 
'#/definitions/VAMetrics' 401: $ref: "#/responses/401" 403: @@ -561,10 +573,10 @@ paths: description: Extra project information such as description required: true schema: - type: object - properties: - description: - type: string + type: object + properties: + description: + type: string tags: - Projects @@ -600,12 +612,12 @@ paths: description: Extra project information such as description required: true schema: - type: object - properties: - name: - type: string - description: - type: string + type: object + properties: + name: + type: string + description: + type: string tags: - Projects responses: @@ -698,6 +710,7 @@ paths: - $ref: '#/parameters/ApiKey' - $ref: '#/parameters/PageToken' - $ref: '#/parameters/PageSize' + - $ref: '#/parameters/UserDetails' - name: PROJECT in: path description: Name of the project @@ -778,14 +791,14 @@ paths: description: Extra user information such as roles and email required: false schema: - type: object - properties: - projects: - type: array - items: - $ref: "#/definitions/ProjectRoles" - email: - type: string + type: object + properties: + projects: + type: array + items: + $ref: "#/definitions/ProjectRoles" + email: + type: string tags: - Projects responses: @@ -826,12 +839,12 @@ paths: description: Extra user information such as roles and email required: false schema: - type: object - properties: - projects: - type: array - items: - $ref: "#/definitions/ProjectRoles" + type: object + properties: + projects: + type: array + items: + $ref: "#/definitions/ProjectRoles" tags: - Projects responses: @@ -874,12 +887,12 @@ paths: description: Extra user information such as roles and email required: false schema: - type: object - properties: - projects: - type: array - items: - $ref: "#/definitions/ProjectRoles" + type: object + properties: + projects: + type: array + items: + $ref: "#/definitions/ProjectRoles" tags: - Projects responses: @@ -944,6 +957,7 @@ paths: - $ref: '#/parameters/ApiKey' - $ref: '#/parameters/PageToken' - 
$ref: '#/parameters/PageSize' + - $ref: '#/parameters/UserDetails' tags: - Users responses: @@ -1010,18 +1024,18 @@ paths: description: Extra user information such as roles and email required: false schema: - type: object - properties: - projects: - type: array - items: - $ref: "#/definitions/ProjectRoles" - email: - type: string - service_roles: - type: array - items: - type: string + type: object + properties: + projects: + type: array + items: + $ref: "#/definitions/ProjectRoles" + email: + type: string + service_roles: + type: array + items: + type: string tags: - Users responses: @@ -1056,26 +1070,26 @@ paths: description: Extra user information such as roles and email required: false schema: - type: object - properties: - projects: - type: array - items: - $ref: "#/definitions/ProjectRoles" - first_name: - type: string - last_name: - type: string - description: - type: string - organization: - type: string - email: - type: string - service_roles: - type: array - items: - type: string + type: object + properties: + projects: + type: array + items: + $ref: "#/definitions/ProjectRoles" + first_name: + type: string + last_name: + type: string + description: + type: string + organization: + type: string + email: + type: string + service_roles: + type: array + items: + type: string tags: - Users responses: @@ -1359,7 +1373,7 @@ paths: description: Parameters of the new subscription object required: true schema: - $ref: '#/definitions/SubParameters' + $ref: '#/definitions/SubParameters' tags: - Subscriptions responses: @@ -1475,7 +1489,7 @@ paths: description: List of authorized users required: true schema: - $ref: '#/definitions/AuthUsers' + $ref: '#/definitions/AuthUsers' tags: - Subscriptions responses: @@ -1518,7 +1532,7 @@ paths: description: Offset required: true schema: - $ref: '#/definitions/Offset' + $ref: '#/definitions/Offset' tags: - Subscriptions responses: @@ -1560,7 +1574,7 @@ paths: description: AckDeadline required: true schema: - $ref: 
'#/definitions/AckDeadline' + $ref: '#/definitions/AckDeadline' tags: - Subscriptions responses: @@ -1681,7 +1695,7 @@ paths: description: Parameters to be used during pull required: true schema: - $ref: '#/definitions/PullOptions' + $ref: '#/definitions/PullOptions' tags: - Subscriptions responses: @@ -1723,7 +1737,7 @@ paths: description: Parameters to be used during acknowledgement required: true schema: - $ref: '#/definitions/AckIDs' + $ref: '#/definitions/AckIDs' tags: - Subscriptions responses: @@ -1765,7 +1779,7 @@ paths: description: Parameters to be used during acknowledgement required: true schema: - $ref: '#/definitions/PushConfigRef' + $ref: '#/definitions/PushConfigRef' tags: - Subscriptions responses: @@ -1843,7 +1857,7 @@ paths: description: push status required: true schema: - $ref: '#/definitions/PushStatus' + $ref: '#/definitions/PushStatus' tags: - Subscriptions responses: @@ -1985,10 +1999,10 @@ paths: description: The name of the schema to be linked with the topic.All published messages will be validated against the linked schema. 
required: false schema: - type: object - properties: - schema: - type: string + type: object + properties: + schema: + type: string tags: - Topics responses: @@ -2071,7 +2085,7 @@ paths: description: Message JSON representation required: true schema: - $ref: '#/definitions/Messages' + $ref: '#/definitions/Messages' tags: - Topics responses: @@ -2182,7 +2196,7 @@ paths: description: List of authorized users required: true schema: - $ref: '#/definitions/AuthUsers' + $ref: '#/definitions/AuthUsers' tags: - Topics responses: @@ -2224,6 +2238,27 @@ parameters: required: true type: string default: SecretKey123 + ApiKeyDetails: + name: key + in: query + description: user key token for authentication when accesing the url parameter details + required: false + type: string + default: "" + Details: + name: details + in: query + description: report detailed error for ams push server + required: false + type: boolean + default: false + UserDetails: + name: details + in: query + description: report detailed information for each user(projects, subscriptions, topics) + required: false + type: boolean + default: false RegistrationStatus: name: status in: query @@ -2328,14 +2363,14 @@ responses: $ref: '#/definitions/ErrorMsg' 409_no_topic: - description: Subscription's topic doesn't exist - schema: - $ref: '#/definitions/ErrorMsg' + description: Subscription's topic doesn't exist + schema: + $ref: '#/definitions/ErrorMsg' 409_no_offset_for_time: - description: Timestamp is out of bounds for the subscription's topic/partition - schema: - $ref: '#/definitions/ErrorMsg' + description: Timestamp is out of bounds for the subscription's topic/partition + schema: + $ref: '#/definitions/ErrorMsg' 500: description: Internal Error @@ -2345,31 +2380,31 @@ responses: definitions: UserRegistration: - type: object - properties: - uuid: + type: object + properties: + uuid: type: string - name: + name: type: string - email: + email: type: string - first_name: + first_name: type: string - 
last_name: + last_name: type: string - organization: + organization: type: string - description: + description: type: string - activation_token: + activation_token: type: string - status: + status: type: string - registered_at: + registered_at: type: string - modified_at: + modified_at: type: string - modified_by: + modified_by: type: string UserRegistrationList: @@ -2381,19 +2416,19 @@ definitions: $ref: '#/definitions/UserRegistration' UserRegistrationPost: - type: object - properties: - name: + type: object + properties: + name: type: string - email: + email: type: string - first_name: + first_name: type: string - last_name: + last_name: type: string - organization: + organization: type: string - description: + description: type: string SchemaList: @@ -2416,6 +2451,19 @@ definitions: schema: type: object + VAMetrics: + type: object + properties: + projects_metrics: + type: object + $ref: '#/definitions/TotalProjectMessageCount' + users_count: + type: integer + topics_count: + type: integer + subscriptions_count: + type: integer + TotalProjectMessageCount: type: object properties: @@ -2439,12 +2487,12 @@ definitions: type: integer Metrics: - type: object - properties: - metrics: - type: array - items: - $ref: '#/definitions/Metric' + type: object + properties: + metrics: + type: array + items: + $ref: '#/definitions/Metric' Metric: type: object @@ -2473,9 +2521,6 @@ definitions: type: string value: type: string - - - AuthUsers: type: object @@ -2483,7 +2528,7 @@ definitions: authorized_users: type: array items: - type: string + type: string PullOptions: type: object @@ -2515,15 +2560,18 @@ definitions: type: string description: Name of the topic pushConfig: - $ref: '#/definitions/PushConfig' + $ref: '#/definitions/PushConfig' ackDeadlineSeconds: type: integer description: maximum wait time in seconds for Acknowledgement + created_on: + type: string + description: creation date PushConfigRef: type: object properties: pushConfig: - $ref: '#/definitions/PushConfig' 
+ $ref: '#/definitions/PushConfig' PushConfig: type: object properties: @@ -2533,6 +2581,8 @@ definitions: maxMessages: type: integer description: batch size of messages per push action + authorization_header: + $ref: '#/definitions/AuthorizationHeader' retryPolicy: $ref: '#/definitions/RetryPolicy' verification_hash: @@ -2551,38 +2601,48 @@ definitions: type: object properties: ackDeadlineSeconds: - type: integer - description: deadline to acknowledge a pulled message (in seconds) + type: integer + description: deadline to acknowledge a pulled message (in seconds) Offset: type: object properties: offset: - type: integer - description: offset number of current subscriptions + type: integer + description: offset number of current subscriptions Offsets: type: object properties: min: - type: integer - description: minimum offset + type: integer + description: minimum offset max: - type: integer - description: max offset + type: integer + description: max offset current: - type: integer - description: current offset + type: integer + description: current offset RetryPolicy: type: object properties: - type: - type: string - description: type of the retry policy used (Only linear policy supported) - period: - type: integer - description: period of retry policy in milliseconds + type: + type: string + description: type of the retry policy used (Only linear policy supported) + period: + type: integer + description: period of retry policy in milliseconds + + AuthorizationHeader: + type: object + properties: + type: + type: string + description: indicates how to generate the authorization header value. autogen and disabled are the supported values. 
+ value: + type: string + description: the value that will be included in the Authotization header for every message delivered to a push endpoint Topic: type: object @@ -2590,6 +2650,9 @@ definitions: name: type: string description: Name of the topic + created_on: + type: string + description: creation date Topics: type: object @@ -2667,17 +2730,17 @@ definitions: $ref: '#/definitions/Project' Project: - type: object - properties: - name: + type: object + properties: + name: type: string - created_on: + created_on: type: string - modified_on: + modified_on: type: string - created_by: + created_by: type: string - description: + description: type: string Users: @@ -2693,64 +2756,64 @@ definitions: type: integer User: - type: object - properties: - uuid: + type: object + properties: + uuid: type: string - name: + name: type: string - first_name: + first_name: type: string - last_name: + last_name: type: string - organization: + organization: type: string - description: + description: type: string - projects: + projects: type: array items: - $ref: '#/definitions/ProjectRoles' - token: + $ref: '#/definitions/ProjectRoles' + token: type: string - email: + email: type: string - service_roles: + service_roles: type: array items: type: string - created_on: + created_on: type: string - modified_on: + modified_on: type: string - created_by: + created_by: type: string ProjectRoles: - type: object - properties: - project: + type: object + properties: + project: + type: string + roles: + type: array + items: type: string - roles: - type: array - items: - type: string MessageIDs: - type: object - properties: - messageIds: - type: array - items: - type: string + type: object + properties: + messageIds: + type: array + items: + type: string AckIDs: - type: object - properties: - ackIds: - type: array - items: - type: string + type: object + properties: + ackIds: + type: array + items: + type: string HealthCheck: type: object @@ -2793,21 +2856,17 @@ definitions: status: type: string 
description: status of the error - + Version: type: object properties: - release: - type: string - commit: - type: string - build_time: + build_time: type: string golang: type: string - compiler: + compiler: type: string - os: + os: type: string architecture: - type: string \ No newline at end of file + type: string \ No newline at end of file diff --git a/doc/v1/docs/api_auth.md b/doc/v1/docs/api_auth.md index 8e8f8343..f9833f73 100644 --- a/doc/v1/docs/api_auth.md +++ b/doc/v1/docs/api_auth.md @@ -2,6 +2,8 @@ Each user is authenticated by adding the url parameter `?key=T0K3N` in each API request +Users can also authenticate using the header `x-api-key`. + If a user does not provide a valid token the following response is returned: ```json { diff --git a/doc/v1/docs/api_basic.md b/doc/v1/docs/api_basic.md index 21ca88f1..a68e1f98 100644 --- a/doc/v1/docs/api_basic.md +++ b/doc/v1/docs/api_basic.md @@ -6,6 +6,7 @@ The ARGO Messaging Service API implements the Google PubSub specification and th All methods must be called using HTTPS. Arguments can be passed as GET or POST params, or a mix. The response contains a `200 OK` for a successful request and a JSON object in case of an error. For failure results, the error property will contain a short machine-readable error code. In the case of problematic calls, during handling user’s request the API responds using a predefined schema (described in chapter Errors), that contains a short machine-readable warning code, an error code and an error description (or list of them, in the case of multiple errors). Each user is authenticated by adding the url parameter `?key=T0K3N` in each API request +Users can also authenticate using the header `x-api-key`. 
## Configuration file: config.json diff --git a/doc/v1/docs/api_health.md b/doc/v1/docs/api_health.md new file mode 100644 index 00000000..9eac4600 --- /dev/null +++ b/doc/v1/docs/api_health.md @@ -0,0 +1,45 @@ +# Service health check for ams and push server + +This method can be used to retrieve api information regarding the proper functionality +of the ams service and the push server + +## [GET] Get Health status + +### Request +``` +GET "/v1/status" +``` + +### Example request + +- `details=(true|false)` indicates if we need detailed +information about errors regarding the push server. + +- A user token corresponding to a `service_admin` or `admin_viewer` +has to be provided when using the `details` parameter. + +``` +curl -H "Content-Type: application/json" + "https://{URL}/v1/status?details=true&key=token" +``` + +### Responses +If successful, the response returns the health status of the service + +Success Response +`200 OK` + +```json +{ + "status": "ok", + "push_servers": [ + { + "endpoint": "localhost:5555", + "status": "Success: SERVING" + } + ] +} +``` + +### Errors +Please refer to section [Errors](api_errors.md) to see all possible Errors \ No newline at end of file diff --git a/doc/v1/docs/api_metrics.md b/doc/v1/docs/api_metrics.md index 6666e835..a18bfd68 100644 --- a/doc/v1/docs/api_metrics.md +++ b/doc/v1/docs/api_metrics.md @@ -62,49 +62,16 @@ Success Response ### Errors Please refer to section [Errors](api_errors.md) to see all possible Errors -## [GET] Get Health status - -### Request -``` -GET "/v1/status" -``` - -### Example request - -``` -curl -H "Content-Type: application/json" - "https://{URL}/v1/status" -``` - -### Responses -If successful, the response returns the health status of the service - -Success Response -`200 OK` - -```json -{ - "status": "ok", - "push_servers": [ - { - "endpoint": "localhost:5555", - "status": "Success: SERVING" - } - ] -} -``` - -### Errors -Please refer to section [Errors](api_errors.md) to see all possible 
Errors - -## [GET] Get Daily Message Average +## [GET] Get VA Metrics This request returns the total amount of messages per project for the given time window.The number of messages is calculated using the `daily message count` for each one of the project's topics. +It also returns the amount of created `users`, `topics` and `subscriptions` +within the given time window. ### Request ``` -GET "/v1/metrics/daily-message-average" +GET "/v1/metrics/va_metrics" ``` ### URL parameters @@ -116,24 +83,26 @@ GET "/v1/metrics/daily-message-average" ``` curl -H "Content-Type: application/json" - "https://{URL}/v1/metrics/daily-message-average" + "https://{URL}/v1/metrics/va_metrics" ``` ### Example request with URL parameters ``` curl -H "Content-Type: application/json" - "https://{URL}/v1/metrics/daily-message-average?start_date=2019-03-01&end_date=2019-07-24&projects=ARGO,ARGO-2" + "https://{URL}/v1/metrics/va_metrics?start_date=2019-03-01&end_date=2019-07-24&projects=ARGO,ARGO-2" ``` ### Responses -If successful, the response returns the total amount of messages per project for the given time window +If successful, the response returns the total amount of messages per project, +users,topics and subscriptions for the given time window Success Response `200 OK` ```json { + "projects_metrics": { "projects": [ { "project": "ARGO-2", @@ -148,6 +117,10 @@ Success Response ], "total_message_count": 25677, "average_daily_messages": 122 + }, + "users_count": 44, + "topics_count": 33, + "subscriptions_counter": 100 } ``` ### Errors diff --git a/doc/v1/docs/api_projects.md b/doc/v1/docs/api_projects.md index 70698bdd..a515cfd8 100644 --- a/doc/v1/docs/api_projects.md +++ b/doc/v1/docs/api_projects.md @@ -212,10 +212,14 @@ Please refer to section [Errors](api_errors.md) to see all possible Errors ### [GET] List all users that are members of a specific project +- `details`, if set to `true`, it will return the detailed view of each user, +containing the projects, subscriptions and 
topics that the user belongs to. + + ### Example request ``` curl -X GET -H "Content-Type: application/json" - "https://{URL}/v1/projects/ARGO2/members?key=S3CR3T" + "https://{URL}/v1/projects/ARGO2/members?key=S3CR3T&details=true" ``` ### Responses @@ -253,7 +257,7 @@ Success Response } ``` -### Unpriviledge mode (non service_admin user) +### The Unprivileged mode (non service_admin user) When a user is project_admin instead of service_admin and lists a project's users the results returned remove user information such as `token`, `service_roles` and `created_by` For example: diff --git a/doc/v1/docs/api_subs.md b/doc/v1/docs/api_subs.md index 8da3612e..c89a5068 100644 --- a/doc/v1/docs/api_subs.md +++ b/doc/v1/docs/api_subs.md @@ -32,7 +32,8 @@ Success Response { "name": "projects/BRAND_NEW/subscriptions/alert_engine", "topic": "projects/BRAND_NEW/topics/monitoring", - "ackDeadlineSeconds": 10 + "ackDeadlineSeconds": 10 , + "created_on": "2020-11-19T00:00:00Z" } ``` @@ -111,16 +112,32 @@ should expect a request body with the following schema: "pushConfig": { "pushEndpoint": "https://127.0.0.1:5000/receive_here", "maxMessages": 3, + "authorization_header": { + "type": "autogen", + "value": "4551h9j7f7dde380a5f8bc4fdb4fe980c565b67b" + } , "retryPolicy": { "type": "linear", "period": 1000 }, "verification_hash": "9d5189f7f758e380a5f8bc4fdb4fe980c565b67b", "verified": false - } + }, + "created_on": "2020-11-19T00:00:00Z" } ``` +### Authorization headers + +Specify an `authorization header` value and how it is going to be generated, +to be included in the outgoing push request with each message, to the remote +push endpoint. + +- `autogen(default)`: The authorization header value will be automatically +generated by the service itself. +- `disabled`: No authorization header will be provided with the outgoing +push requests. 
+ ### Different Retry Policies Creating a push enabled subscription with a `linear` retry policy and a `period` of 3000 means that you will be receiving message(s) every `3000ms`. @@ -261,13 +278,15 @@ Success Response "name": "projects/BRAND_NEW/subscriptions/alert_engine", "topic": "projects/BRAND_NEW/topics/monitoring", "pushConfig": {}, - "ackDeadlineSeconds": 10 + "ackDeadlineSeconds": 10, + "created_on": "2020-11-19T00:00:00Z" }, { "name": "projects/BRAND_NEW/subscriptions/alert_engine2", "topic": "projects/BRAND_NEW/topics/monitoring", "pushConfig": {}, - "ackDeadlineSeconds": 10 + "ackDeadlineSeconds": 10, + "created_on": "2020-11-19T00:00:00Z" }], "nextPageToken": "", "totalSize": 2 @@ -301,7 +320,8 @@ Success Response "name": "projects/BRAND_NEW/subscriptions/alert_engine", "topic": "projects/BRAND_NEW/topics/monitoring", "pushConfig": {}, - "ackDeadlineSeconds": 10 + "ackDeadlineSeconds": 10, + "created_on": "2020-11-19T00:00:00Z" } ], "nextPageToken": "", @@ -336,7 +356,8 @@ Success Response "name": "projects/BRAND_NEW/subscriptions/alert_engine2", "topic": "projects/BRAND_NEW/topics/monitoring", "pushConfig": {}, - "ackDeadlineSeconds": 10 + "ackDeadlineSeconds": 10, + "created_on": "2020-11-19T00:00:00Z" } ], "nextPageToken": "some_token", @@ -506,6 +527,9 @@ This request modifies the push configuration of a subscription "pushConfig":{ "pushEndpoint":"", "maxMessages": 5, + "authorization_header": { + "type": "autogen" + }, "retryPolicy":{ "type":"linear", "period":300 @@ -519,6 +543,15 @@ This request modifies the push configuration of a subscription - subscription_name: The subscription name to consume - pushConfig: configuration including pushEndpoint for the remote endpoint to receive the messages. Also includes retryPolicy (type of retryPolicy and period parameters) +- `autogen(default when modyfing a sub from pull to push)`: The authorization header value will be automatically +generated by the service itself. 
+- `disabled`: No authorization header will be provided with the outgoing +push requests. + +NOTE that if you updated a push configuration with autogen +the service will generate a new value every time the update request happens. +For example, if you want to update your authorization header value, +you can use the update request with the autogen type. ### Example request @@ -670,7 +703,7 @@ Please refer to section [Errors](api_errors.md) to see all possible Errors This request returns the min, max and current offset of a subscription ### Request -`GET /v1/projects/{project_name}/subscriptions/{subscription_name}:Offsets` +`GET /v1/projects/{project_name}/subscriptions/{subscription_name}:offsets` ### Post body: ``` diff --git a/doc/v1/docs/api_topics.md b/doc/v1/docs/api_topics.md index 867ce682..06bc75e6 100644 --- a/doc/v1/docs/api_topics.md +++ b/doc/v1/docs/api_topics.md @@ -37,7 +37,8 @@ Success Response `200 OK` ```json { - "name": "projects/BRAND_NEW/topics/monitoring" + "name": "projects/BRAND_NEW/topics/monitoring", + "created_on": "2020-11-21T00:00:00Z" } ``` @@ -98,7 +99,9 @@ Success Response `200 OK` ```json { - "name": "projects/BRAND_NEW/topics/monitoring" + "name": "projects/BRAND_NEW/topics/monitoring", + "created_on": "2020-11-21T00:00:00Z" + } ``` @@ -149,10 +152,12 @@ Success Response { "topics": [ { - "name":"/project/BRAND_NEW/topics/monitoring" + "name":"/project/BRAND_NEW/topics/monitoring", + "created_on": "2020-11-21T00:00:00Z" }, { - "name":"/project/BRAND_NEW/topics/accounting" + "name":"/project/BRAND_NEW/topics/accounting", + "created_on": "2020-11-21T00:00:00Z" } ], "nextPageToken": "", @@ -183,7 +188,8 @@ Success Response { "topics": [ { - "name":"/project/BRAND_NEW/topics/monitoring" + "name":"/project/BRAND_NEW/topics/monitoring", + "created_on": "2020-11-21T00:00:00Z" } ], "nextPageToken": "some_token", @@ -214,7 +220,8 @@ Success Response { "topics": [ { - "name":"/project/BRAND_NEW/topics/accounting" + 
"name":"/project/BRAND_NEW/topics/accounting", + "created_on": "2020-11-21T00:00:00Z" } ], "nextPageToken": "", diff --git a/doc/v1/docs/api_users.md b/doc/v1/docs/api_users.md index 83f484c9..41fc6a32 100644 --- a/doc/v1/docs/api_users.md +++ b/doc/v1/docs/api_users.md @@ -3,8 +3,12 @@ ARGO Messaging Service supports calls for creating and modifing users ## [GET] Manage Users - List all users + This request lists all available users in the service using pagination +- `details`, if set to `true`, it will return the detailed view of each user, +containing the projects, subscriptions and topics that the user belongs to. + It is important to note that if there are no results to return the service will return the following: Success Response @@ -30,7 +34,7 @@ GET "/v1/users" ### Example request ``` curl -X GET -H "Content-Type: application/json" - "https://{URL}/v1/users?key=S3CR3T" + "https://{URL}/v1/users?key=S3CR3T&details=true" ``` ### Responses @@ -183,7 +187,7 @@ Success Response ### Example request ``` curl -X GET -H "Content-Type: application/json" - "https://{URL}/v1/users?key=S3CR3T&pageSize=2" + "https://{URL}/v1/users?key=S3CR3T&pageSize=2&details=true" ``` ### Responses @@ -253,7 +257,7 @@ Success Response ### Example request ``` curl -X GET -H "Content-Type: application/json" - "https://{URL}/v1/users?key=S3CR3T&pageSize=3&pageToken=some_token2" + "https://{URL}/v1/users?key=S3CR3T&pageSize=3&pageToken=some_token2&details=true" ``` ### Responses @@ -355,7 +359,7 @@ Success Response ### Example request ``` curl -X GET -H "Content-Type: application/json" - "https://{URL}/v1/users?key=S3CR3T&project=ARGO2" + "https://{URL}/v1/users?key=S3CR3T&project=ARGO2&details=true" ``` ### Responses diff --git a/doc/v1/docs/api_version.md b/doc/v1/docs/api_version.md index d960d028..27306dff 100644 --- a/doc/v1/docs/api_version.md +++ b/doc/v1/docs/api_version.md @@ -24,10 +24,8 @@ Json Response ```json { - "release": "1.0.5", - "commit": 
"f9f2e8c5f02lbcc94fe76b0d3cfa5d20d9365444", "build_time": "2019-11-01T12:51:04Z", - "golang": "go1.11.5", + "golang": "go1.15.6", "compiler": "gc", "os": "linux", "architecture": "amd64" diff --git a/doc/v1/docs/index.md b/doc/v1/docs/index.md index 5006a5e2..16940fad 100644 --- a/doc/v1/docs/index.md +++ b/doc/v1/docs/index.md @@ -5,6 +5,7 @@ ARGO Messaging Service Documentation - [Messaging API flow](msg_flow.md) - [Authentication & Authorization](auth.md) - [Project and User creation](projects_users.md) +- [Security](security.md) Argo Messaging API @@ -21,6 +22,7 @@ API Calls - [Metrics](api_metrics.md) - [Schemas](api_schemas.md) - [Version Information](api_version.md) +- [Health Status](api_health.md) - [Registrations](api_registrations.md) Frequent Questions diff --git a/doc/v1/docs/security.md b/doc/v1/docs/security.md new file mode 100644 index 00000000..42b48b8a --- /dev/null +++ b/doc/v1/docs/security.md @@ -0,0 +1,119 @@ +# Security in Argo Messaging service + +## Authentication + +Authentication in the AMS takes place using an `url key` provided +with each API request. + +The large majority of api calls support the `url parameter`, key. + +E.g. `/v1/projects?key=b328c7890f061f87cbd4rff34f36fa2ae20993a5` + + The service also supports the use of the x-api-key header +for the user to provide its key. + +- Each request will extract the key from the request parameters +and will try to find a user associated with it in the respective +data store. + +- The key can also be refreshed when needed with the +`/users/{user}:refreshToken` api call. + +- API keys are expected to be used by external service's clients. + +### X509 Authentication +Although AMS doesn't support direct authentication through an x509 certificate, +you can use the [argo-authentication-service](https://github.com/ARGOeu/argo-api-authn) +to map an x509 certificate to an AMS `key`. +The service will also validate the certificate. 
+The [ams-library](https://github.com/ARGOeu/argo-ams-library) will effortlessly +hide this complexity if you decide to use it in order to access AMS. + + +## Authorization + +After the authentication part takes place, the user will also be assigned +its privileges/roles in order for the service to determine, +if the `user is allowed to` + - access the requested resource. + - perform a certain action. + +The Argo Messaging Service supports the following `core` roles: + +- `service_admin` - which is targeted to users that have an administrative duty over the service. +Service admin is a service wide role. +- `project_admin` - which is targeted towards users that manage the resources/actions +under a specific ams project. Project admins can only access the project(s) they belong to. +Project admin is a `per project role` not a service wide role. +- `publisher` - which is targeted towards users that primarily publish messages to topics. +Publishers are able to access topic(s) under the project(s) they belong to. +Publisher is a `per project role` not a service wide role. +- `consumer` - which is targeted towards users that primarily consume messages from subscriptions. + Consumers are able to access subscription(s) under the project(s) they belong to. + Consumer is a `per project role` not a service wide role. + + E.g. `userA` can be a + - `project_admin` under `projectA`, + - `publisher` under `projectB` + - `publisher` & `consumer` under `projectC`. + + + Each API route gets assigned which roles it should accept, + +- /v1/projects is only accessible by `service_admin`, + + - /v1/topics/{topic}:publish is accessible by `service_admin`, `project_admin` and `publisher`. + + ## ACL Based access + +Publishers `cannot access` all topics under their project. +Same for consumers, they `cannot access` all subscriptions under their project. 
+ +Both Topics and Subscriptions have ACLs which determine which of the project's +`publishers` and `consumers` respectively, can access them. +ACLs for topics and subscriptions contain `user names`. + +## Push Enabled Subscriptions + +### Verifying Ownership of Push Subscriptions Endpoints +Whenever a subscription is created with a valid push configuration, the service will also generate a unique hash that +should be later used to validate the ownership of the registered push endpoint, and will mark the subscription as +unverified.This procedure is mandatory in order to avoid spam requests to +endpoints that don't belong to the right user. + +The owner of the push endpoint needs to execute the following steps in order to verify the ownership of the +registered endpoint. + +- Expose an api call with a path of `/ams_verification_hash`. The service will try to access this path using the `host:port` +of the push endpoint. For example, if the push endpoint is `https://example.com:8443/receive_here`, the push endpoint should also +support the api route of `https://example.com:8443/ams_verification_hash`. + +- The api route of `https://example.com:8443/ams_verification_hash` should support the http `GET` method. + +- A `GET` request to `https://example.com:8443/ams_verification_hash` should return a response body +with only the `verification_hash` +that is found inside the subscriptions push configuration, +a `status code` of `200` and the header `Content-type: plain/text`. + +### Securing remote push endpoints + +If you want to secure your remote endpoint, you can have the service generate +a unique authorization hash for the subscription, which means that all +push messages will contain the generated token inside +the `Authorization` header. +As a result the remote endpoint can authenticate incoming +push messages. 
+ +## AMS - Push Server Connectivity + +AMS doesn't handle the actual pushing of messages for push enabled subscriptions, +only the configuration part of them. + +The [ams-push-server](https://github.com/ARGOeu/ams-push-server) +component is responsible for delivering push messages +to remote endpoints. + +AMS and Push server communicate with each other using `mutual TLS` +for authentication, while the push server also implements an authorization strategy +of accepting requests only from certificates that have specific +`Common Name(s)`. \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index 52e12bc2..00000000 --- a/docker-compose.yml +++ /dev/null @@ -1,38 +0,0 @@ -version: '2' -services: - zookeeper: - image: wurstmeister/zookeeper - ports: - - "2181:2181" - kafka: - image: wurstmeister/kafka - ports: - - "9092:9092" - depends_on: - - zookeeper - links: - - zookeeper - environment: - KAFKA_ADVERTISED_HOST_NAME: ADD_PUBLIC_IP_ADDRESS - KAFKA_ADVERTISED_PORT: 9092 - KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_BROKER_ID: "10" - volumes: - - /var/run/docker.sock:/var/run/docker.sock - mongo: - image: mongo - ports: - - "27017:27017" - argo-messaging: - build: . 
- ports: - - "443:8080" - depends_on: - - mongo - - kafka - links: - - mongo - - zookeeper - - kafka - volumes: - - /var/run/docker.sock:/var/run/docker.sock diff --git a/examples/flask_receive_endpoint/receiver.py b/examples/flask_receive_endpoint/receiver.py index 1d9e0ba9..4a081eaa 100755 --- a/examples/flask_receive_endpoint/receiver.py +++ b/examples/flask_receive_endpoint/receiver.py @@ -55,6 +55,8 @@ MESSAGE_FORMAT = "" +AUTHZ_HEADER = "" + app = Flask(__name__) app.logger.removeHandler(default_handler) @@ -63,6 +65,12 @@ @app.route('/receive_here', methods=['POST']) def receive_msg(): + # if there is an authz header provided, check it + if AUTHZ_HEADER != "": + print(request.headers.get("Authorization")) + if request.headers.get("Authorization") != AUTHZ_HEADER: + return "UNAUTHORIZED", 401 + if MESSAGE_FORMAT is "single": try: @@ -158,6 +166,10 @@ def return_verification_hash(): "-vh", "--verification-hash", metavar="STRING", help="Verification hash for the push endpoint", required=True, dest="vhash") + parser.add_argument( + "-ah", "--authorization-header", metavar="STRING", help="Expected authorization header", + required=False, dest="authz") + group = parser.add_mutually_exclusive_group(required=True) group.add_argument("--single", action="store_true", help="The endpoint should expect single message format", @@ -175,6 +187,8 @@ def return_verification_hash(): VERIFICATION_HASH = args.vhash + AUTHZ_HEADER = args.authz + if args.single_message: MESSAGE_FORMAT = "single" diff --git a/handlers.go b/handlers.go deleted file mode 100644 index d3c5ba80..00000000 --- a/handlers.go +++ /dev/null @@ -1,4756 +0,0 @@ -package main - -import ( - "encoding/json" - "fmt" - "io/ioutil" - "net/http" - "net/url" - "sort" - "strconv" - "strings" - "time" - - log "github.com/sirupsen/logrus" - - "context" - - "github.com/ARGOeu/argo-messaging/auth" - "github.com/ARGOeu/argo-messaging/brokers" - "github.com/ARGOeu/argo-messaging/config" - 
"github.com/ARGOeu/argo-messaging/messages" - "github.com/ARGOeu/argo-messaging/metrics" - "github.com/ARGOeu/argo-messaging/projects" - oldPush "github.com/ARGOeu/argo-messaging/push" - "github.com/ARGOeu/argo-messaging/push/grpc/client" - "github.com/ARGOeu/argo-messaging/schemas" - "github.com/ARGOeu/argo-messaging/stores" - "github.com/ARGOeu/argo-messaging/subscriptions" - "github.com/ARGOeu/argo-messaging/topics" - "github.com/ARGOeu/argo-messaging/version" - - "bytes" - "encoding/base64" - gorillaContext "github.com/gorilla/context" - "github.com/gorilla/mux" - "github.com/twinj/uuid" -) - -// HandlerWrappers -////////////////// - -// WrapValidate handles validation -func WrapValidate(hfn http.HandlerFunc) http.HandlerFunc { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - - urlVars := mux.Vars(r) - - // sort keys - keys := []string(nil) - for key := range urlVars { - keys = append(keys, key) - } - sort.Strings(keys) - - // Iterate alphabetically - for _, key := range keys { - if validName(urlVars[key]) == false { - err := APIErrorInvalidName(key) - respondErr(w, err) - return - } - } - hfn.ServeHTTP(w, r) - - }) -} - -// WrapMockAuthConfig handle wrapper is used in tests were some auth context is needed -func WrapMockAuthConfig(hfn http.HandlerFunc, cfg *config.APICfg, brk brokers.Broker, str stores.Store, mgr *oldPush.Manager, c push.Client, roles ...string) http.HandlerFunc { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - - urlVars := mux.Vars(r) - - userRoles := []string{"publisher", "consumer"} - if len(roles) > 0 { - userRoles = roles - } - - nStr := str.Clone() - defer nStr.Close() - - projectUUID := projects.GetUUIDByName(urlVars["project"], nStr) - gorillaContext.Set(r, "auth_project_uuid", projectUUID) - gorillaContext.Set(r, "brk", brk) - gorillaContext.Set(r, "str", nStr) - gorillaContext.Set(r, "mgr", mgr) - gorillaContext.Set(r, "apsc", c) - gorillaContext.Set(r, "auth_resource", 
cfg.ResAuth) - gorillaContext.Set(r, "auth_user", "UserA") - gorillaContext.Set(r, "auth_user_uuid", "uuid1") - gorillaContext.Set(r, "auth_roles", userRoles) - gorillaContext.Set(r, "push_worker_token", cfg.PushWorkerToken) - gorillaContext.Set(r, "push_enabled", cfg.PushEnabled) - hfn.ServeHTTP(w, r) - - }) -} - -// WrapConfig handle wrapper to retrieve kafka configuration -func WrapConfig(hfn http.HandlerFunc, cfg *config.APICfg, brk brokers.Broker, str stores.Store, mgr *oldPush.Manager, c push.Client) http.HandlerFunc { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - - nStr := str.Clone() - defer nStr.Close() - gorillaContext.Set(r, "brk", brk) - gorillaContext.Set(r, "str", nStr) - gorillaContext.Set(r, "mgr", mgr) - gorillaContext.Set(r, "apsc", c) - gorillaContext.Set(r, "auth_resource", cfg.ResAuth) - gorillaContext.Set(r, "auth_service_token", cfg.ServiceToken) - gorillaContext.Set(r, "push_worker_token", cfg.PushWorkerToken) - gorillaContext.Set(r, "push_enabled", cfg.PushEnabled) - hfn.ServeHTTP(w, r) - - }) -} - -// WrapLog handle wrapper to apply Logging -func WrapLog(hfn http.Handler, name string) http.HandlerFunc { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - - start := time.Now() - - hfn.ServeHTTP(w, r) - - log.WithFields( - log.Fields{ - "type": "request_log", - "method": r.Method, - "path": r.RequestURI, - "action": name, - "requester": gorillaContext.Get(r, "auth_user_uuid"), - "processing_time": time.Since(start).String(), - }, - ).Info("") - }) -} - -// WrapAuthenticate handle wrapper to apply authentication -func WrapAuthenticate(hfn http.Handler) http.HandlerFunc { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - - urlVars := mux.Vars(r) - urlValues := r.URL.Query() - - // if the url parameter 'key' is empty or absent, end the request with an unauthorized response - if urlValues.Get("key") == "" { - err := APIErrorUnauthorized() - respondErr(w, err) - return - 
} - - refStr := gorillaContext.Get(r, "str").(stores.Store) - serviceToken := gorillaContext.Get(r, "auth_service_token").(string) - - projectName := urlVars["project"] - projectUUID := projects.GetUUIDByName(urlVars["project"], refStr) - - // In all cases instead of project create - if "projects:create" != mux.CurrentRoute(r).GetName() { - // Check if given a project name the project wasn't found - if projectName != "" && projectUUID == "" { - apiErr := APIErrorNotFound("project") - respondErr(w, apiErr) - return - } - } - - // Check first if service token is used - if serviceToken != "" && serviceToken == urlValues.Get("key") { - gorillaContext.Set(r, "auth_roles", []string{"service_admin"}) - gorillaContext.Set(r, "auth_user", "") - gorillaContext.Set(r, "auth_user_uuid", "") - gorillaContext.Set(r, "auth_project_uuid", projectUUID) - hfn.ServeHTTP(w, r) - return - } - - roles, user := auth.Authenticate(projectUUID, urlValues.Get("key"), refStr) - - if len(roles) > 0 { - userUUID := auth.GetUUIDByName(user, refStr) - gorillaContext.Set(r, "auth_roles", roles) - gorillaContext.Set(r, "auth_user", user) - gorillaContext.Set(r, "auth_user_uuid", userUUID) - gorillaContext.Set(r, "auth_project_uuid", projectUUID) - hfn.ServeHTTP(w, r) - } else { - err := APIErrorUnauthorized() - respondErr(w, err) - } - - }) -} - -// WrapAuthorize handle wrapper to apply authorization -func WrapAuthorize(hfn http.Handler, routeName string) http.HandlerFunc { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - - urlValues := r.URL.Query() - - refStr := gorillaContext.Get(r, "str").(stores.Store) - refRoles := gorillaContext.Get(r, "auth_roles").([]string) - serviceToken := gorillaContext.Get(r, "auth_service_token").(string) - - // Check first if service token is used - if serviceToken != "" && serviceToken == urlValues.Get("key") { - hfn.ServeHTTP(w, r) - return - } - - if auth.Authorize(routeName, refRoles, refStr) { - hfn.ServeHTTP(w, r) - } else { - err := 
APIErrorForbidden() - respondErr(w, err) - } - }) -} - -// HandlerFunctions -/////////////////// - -// UserProfile returns a user's profile based on the provided url parameter(key) -func UserProfile(w http.ResponseWriter, r *http.Request) { - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - urlValues := r.URL.Query() - - // if the url parameter 'key' is empty or absent, end the request with an unauthorized response - if urlValues.Get("key") == "" { - err := APIErrorUnauthorized() - respondErr(w, err) - return - } - - result, err := auth.GetUserByToken(urlValues.Get("key"), refStr) - - if err != nil { - if err.Error() == "not found" { - err := APIErrorUnauthorized() - respondErr(w, err) - return - } - err := APIErrQueryDatastore() - respondErr(w, err) - return - } - - // Output result to JSON - resJSON, err := result.ExportJSON() - - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - respondOK(w, []byte(resJSON)) - -} - -// ProjectDelete (DEL) deletes an existing project (also removes it's topics and subscriptions) -func ProjectDelete(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Get Result Object - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - // RemoveProject removes also attached subs and topics from the datastore - err := projects.RemoveProject(projectUUID, refStr) - if err != nil { - if err.Error() 
== "not found" { - err := APIErrorNotFound("ProjectUUID") - respondErr(w, err) - return - } - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // TODO Stop any relevant push subscriptions when deleting a project - - // Write empty response if anything ok - respondOK(w, output) - -} - -// ProjectUpdate (PUT) updates the name or the description of an existing project -func ProjectUpdate(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - // Read POST JSON body - body, err := ioutil.ReadAll(r.Body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // Parse pull options - postBody, err := projects.GetFromJSON(body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - log.Error(string(body[:])) - return - } - - modified := time.Now().UTC() - // Get Result Object - - res, err := projects.UpdateProject(projectUUID, postBody.Name, postBody.Description, modified, refStr) - - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("ProjectUUID") - respondErr(w, err) - return - } - - if strings.HasPrefix(err.Error(), "invalid") { - err := APIErrorInvalidData(err.Error()) - respondErr(w, err) - return - } - - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// ProjectCreate (POST) creates a new project -func ProjectCreate(w 
http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - urlProject := urlVars["project"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) - - // Read POST JSON body - body, err := ioutil.ReadAll(r.Body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // Parse pull options - postBody, err := projects.GetFromJSON(body) - if err != nil { - err := APIErrorInvalidArgument("Project") - respondErr(w, err) - log.Error(string(body[:])) - return - } - - uuid := uuid.NewV4().String() // generate a new uuid to attach to the new project - created := time.Now().UTC() - // Get Result Object - - res, err := projects.CreateProject(uuid, urlProject, created, refUserUUID, postBody.Description, refStr) - - if err != nil { - if err.Error() == "exists" { - err := APIErrorConflict("Project") - respondErr(w, err) - return - } - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// ProjectListAll (GET) all projects -func ProjectListAll(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Get Results Object - - res, err 
:= projects.Find("", "", refStr) - - if err != nil && err.Error() != "not found" { - err := APIErrQueryDatastore() - respondErr(w, err) - return - } - - // Output result to JSON - resJSON, err := res.ExportJSON() - - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// ProjectListOne (GET) one project -func ProjectListOne(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - urlProject := urlVars["project"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Get Results Object - results, err := projects.Find("", urlProject, refStr) - - if err != nil { - - if err.Error() == "not found" { - err := APIErrorNotFound("ProjectUUID") - respondErr(w, err) - return - } - err := APIErrQueryDatastore() - respondErr(w, err) - return - } - - // Output result to JSON - res := results.One() - resJSON, err := res.ExportJSON() - - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// RefreshToken (POST) refreshes user's token -func RefreshToken(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - urlUser := urlVars["user"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Get Result Object - userUUID := auth.GetUUIDByName(urlUser, refStr) - 
token, err := auth.GenToken() // generate a new user token - - res, err := auth.UpdateUserToken(userUUID, token, refStr) - - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("User") - respondErr(w, err) - return - } - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// UserUpdate (PUT) updates the user information -func UserUpdate(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - urlUser := urlVars["user"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Read POST JSON body - body, err := ioutil.ReadAll(r.Body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // Parse pull options - postBody, err := auth.GetUserFromJSON(body) - if err != nil { - err := APIErrorInvalidArgument("User") - respondErr(w, err) - return - } - - // Get Result Object - userUUID := auth.GetUUIDByName(urlUser, refStr) - modified := time.Now().UTC() - res, err := auth.UpdateUser(userUUID, postBody.FirstName, postBody.LastName, postBody.Organization, postBody.Description, - postBody.Name, postBody.Projects, postBody.Email, postBody.ServiceRoles, modified, true, refStr) - - if err != nil { - - // In case of invalid project or role in post body - - if err.Error() == "not found" { - err := APIErrorNotFound("User") - respondErr(w, err) - return - } - - if strings.HasPrefix(err.Error(), "invalid") { - err := APIErrorInvalidData(err.Error()) - respondErr(w, err) 
- return - } - - if strings.HasPrefix(err.Error(), "duplicate") { - err := APIErrorInvalidData(err.Error()) - respondErr(w, err) - return - } - - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// UserCreate (POST) creates a new user inside a project -func UserCreate(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - urlUser := urlVars["user"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) - - // Read POST JSON body - body, err := ioutil.ReadAll(r.Body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // Parse pull options - postBody, err := auth.GetUserFromJSON(body) - if err != nil { - err := APIErrorInvalidArgument("User") - respondErr(w, err) - log.Error(string(body[:])) - return - } - - uuid := uuid.NewV4().String() // generate a new uuid to attach to the new project - token, err := auth.GenToken() // generate a new user token - created := time.Now().UTC() - // Get Result Object - res, err := auth.CreateUser(uuid, urlUser, postBody.FirstName, postBody.LastName, postBody.Organization, postBody.Description, - postBody.Projects, token, postBody.Email, postBody.ServiceRoles, created, refUserUUID, refStr) - - if err != nil { - if err.Error() == "exists" { - err := APIErrorConflict("User") - respondErr(w, err) - return - } - - if strings.HasPrefix(err.Error(), "duplicate") { - err := 
APIErrorInvalidData(err.Error()) - respondErr(w, err) - return - } - - if strings.HasPrefix(err.Error(), "invalid") { - err := APIErrorInvalidData(err.Error()) - respondErr(w, err) - return - } - - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// OpMetrics (GET) all operational metrics -func OpMetrics(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Get Results Object - res, err := metrics.GetUsageCpuMem(refStr) - - if err != nil && err.Error() != "not found" { - err := APIErrQueryDatastore() - respondErr(w, err) - return - } - - // Output result to JSON - resJSON, err := res.ExportJSON() - - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// DailyMessageAverage (GET) retrieves the average amount of published messages per day -func DailyMessageAverage(w http.ResponseWriter, r *http.Request) { - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - startDate := time.Time{} - endDate := time.Time{} - var err error - - // if no start date was provided, set it to the start of the unix time - if r.URL.Query().Get("start_date") != "" { - startDate, err = time.Parse("2006-01-02", 
r.URL.Query().Get("start_date")) - if err != nil { - err := APIErrorInvalidData("Start date is not in valid format") - respondErr(w, err) - return - } - } else { - startDate = time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC) - } - - // if no end date was provided, set it to to today - if r.URL.Query().Get("end_date") != "" { - endDate, err = time.Parse("2006-01-02", r.URL.Query().Get("end_date")) - if err != nil { - err := APIErrorInvalidData("End date is not in valid format") - respondErr(w, err) - return - } - } else { - endDate = time.Now().UTC() - } - - if startDate.After(endDate) { - err := APIErrorInvalidData("Start date cannot be after the end date") - respondErr(w, err) - return - } - - projectsList := make([]string, 0) - projectsUrlValue := r.URL.Query().Get("projects") - if projectsUrlValue != "" { - projectsList = strings.Split(projectsUrlValue, ",") - } - - cc, err := projects.GetProjectsMessageCount(projectsList, startDate, endDate, refStr) - if err != nil { - err := APIErrorNotFound(err.Error()) - respondErr(w, err) - return - } - - output, err := json.MarshalIndent(cc, "", " ") - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - respondOK(w, output) -} - -// UserListByToken (GET) one user by his token -func UserListByToken(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - urlToken := urlVars["token"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Get Results Object - result, err := auth.GetUserByToken(urlToken, refStr) - - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("User") - respondErr(w, err) - return - } - err := APIErrQueryDatastore() - 
respondErr(w, err) - return - } - - // Output result to JSON - resJSON, err := result.ExportJSON() - - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// ProjectUserListOne (GET) one user member of a specific project -func ProjectUserListOne(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - urlUser := urlVars["user"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - refRoles := gorillaContext.Get(r, "auth_roles").([]string) - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - // check that user is indeed a service admin in order to be priviledged to see full user info - priviledged := auth.IsServiceAdmin(refRoles) - - // Get Results Object - results, err := auth.FindUsers(projectUUID, "", urlUser, priviledged, refStr) - - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("User") - respondErr(w, err) - return - } - - err := APIErrQueryDatastore() - respondErr(w, err) - return - } - - res := results.One() - - // Output result to JSON - resJSON, err := res.ExportJSON() - - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// ProjectUserCreate (POST) creates a user under the respective project by the project's admin -func ProjectUserCreate(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", 
contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - urlUser := urlVars["user"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) - refProjUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - // Read POST JSON body - body, err := ioutil.ReadAll(r.Body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // Parse pull options - postBody, err := auth.GetUserFromJSON(body) - if err != nil { - err := APIErrorInvalidArgument("User") - respondErr(w, err) - log.Error(string(body[:])) - return - } - - // omit service wide roles - postBody.ServiceRoles = []string{} - - // allow the user to be created to only have reference to the project under which is being created - prName := projects.GetNameByUUID(refProjUUID, refStr) - if prName == "" { - err := APIErrGenericInternal("Internal Error") - respondErr(w, err) - return - } - - projectRoles := auth.ProjectRoles{} - - for _, p := range postBody.Projects { - if p.Project == prName { - projectRoles.Project = prName - projectRoles.Roles = p.Roles - projectRoles.Topics = p.Topics - projectRoles.Subs = p.Subs - break - } - } - - // if the project was not mentioned in the creation, add it - if projectRoles.Project == "" { - projectRoles.Project = prName - } - - postBody.Projects = []auth.ProjectRoles{projectRoles} - - uuid := uuid.NewV4().String() // generate a new uuid to attach to the new project - token, err := auth.GenToken() // generate a new user token - created := time.Now().UTC() - - // Get Result Object - res, err := auth.CreateUser(uuid, urlUser, "", "", "", "", postBody.Projects, token, postBody.Email, postBody.ServiceRoles, created, refUserUUID, refStr) - - if err != nil { - if err.Error() == "exists" { - err := APIErrorConflict("User") - respondErr(w, err) - return - } - - if strings.HasPrefix(err.Error(), "invalid") { - err := 
APIErrorInvalidData(err.Error()) - respondErr(w, err) - return - } - - if strings.HasPrefix(err.Error(), "duplicate") { - err := APIErrorInvalidData(err.Error()) - respondErr(w, err) - return - } - - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// ProjectUserUpdate (PUT) updates a user under the respective project by the project's admin -func ProjectUserUpdate(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - urlUser := urlVars["user"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - refProjUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - refRoles := gorillaContext.Get(r, "auth_roles").([]string) - - // allow the user to be updated to only have reference to the project under which is being updated - prName := projects.GetNameByUUID(refProjUUID, refStr) - if prName == "" { - err := APIErrGenericInternal("Internal Error") - respondErr(w, err) - return - } - - // Read POST JSON body - body, err := ioutil.ReadAll(r.Body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // Parse pull options - postBody, err := auth.GetUserFromJSON(body) - if err != nil { - err := APIErrorInvalidArgument("User") - respondErr(w, err) - return - } - - u, err := auth.FindUsers("", "", urlUser, true, refStr) - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("User") - respondErr(w, err) - return - } - - err := APIErrQueryDatastore() - 
respondErr(w, err) - return - } - - // from the post request keep only the reference to the current project - projectRoles := auth.ProjectRoles{} - - for _, p := range postBody.Projects { - if p.Project == prName { - projectRoles.Project = prName - projectRoles.Roles = p.Roles - projectRoles.Topics = p.Topics - projectRoles.Subs = p.Subs - break - } - } - - // if the user is already a member of the project, update it with the accepted contents of the post body - found := false - for idx, p := range u.One().Projects { - if p.Project == projectRoles.Project { - u.One().Projects[idx].Roles = projectRoles.Roles - u.One().Projects[idx].Topics = projectRoles.Topics - u.One().Projects[idx].Subs = projectRoles.Subs - found = true - break - } - } - - if !found { - err := APIErrorForbiddenWithMsg("User is not a member of the project") - respondErr(w, err) - return - } - - // check that user is indeed a service admin in order to be privileged to see full user info - privileged := auth.IsServiceAdmin(refRoles) - - // Get Result Object - userUUID := u.One().UUID - modified := time.Now().UTC() - userProjects := u.One().Projects - userEmail := u.One().Email - userSRoles := u.One().ServiceRoles - userName := u.One().Name - userFN := u.One().FirstName - userLN := u.One().LastName - userOrg := u.One().Organization - userDesc := u.One().Description - - _, err = auth.UpdateUser(userUUID, userFN, userLN, userOrg, userDesc, userName, userProjects, userEmail, userSRoles, modified, false, refStr) - - if err != nil { - - // In case of invalid project or role in post body - if err.Error() == "not found" { - err := APIErrorNotFound("User") - respondErr(w, err) - return - } - - if strings.HasPrefix(err.Error(), "invalid") { - err := APIErrorInvalidData(err.Error()) - respondErr(w, err) - return - } - - if strings.HasPrefix(err.Error(), "duplicate") { - err := APIErrorInvalidData(err.Error()) - respondErr(w, err) - return - } - - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - 
return - } - - stored, err := auth.FindUsers(refProjUUID, userUUID, urlUser, privileged, refStr) - - if err != nil { - - if err.Error() == "not found" { - err := APIErrorNotFound("User") - respondErr(w, err) - return - } - - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - - } - - // Output result to JSON - resJSON, err := json.MarshalIndent(stored.One(), "", " ") - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// ProjectUserRemove (POST) removes a user from the respective project -func ProjectUserRemove(w http.ResponseWriter, r *http.Request) { - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - urlUser := urlVars["user"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - refProjUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - projName := projects.GetNameByUUID(refProjUUID, refStr) - - u, err := auth.FindUsers("", "", urlUser, true, refStr) - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("User") - respondErr(w, err) - return - } - - err := APIErrQueryDatastore() - respondErr(w, err) - return - } - - userProjects := []auth.ProjectRoles{} - - // if the user is already a member of the project, update it with the accepted contents of the post body - found := false - for idx, p := range u.One().Projects { - if p.Project == projName { - userProjects = append(userProjects, u.One().Projects[:idx]...) - userProjects = append(userProjects, u.One().Projects[idx+1:]...) 
- found = true - break - } - } - - if !found { - err := APIErrorForbiddenWithMsg("User is not a member of the project") - respondErr(w, err) - return - } - - // Get Result Object - userUUID := u.One().UUID - modified := time.Now().UTC() - userEmail := u.One().Email - userSRoles := u.One().ServiceRoles - userName := u.One().Name - userFN := u.One().FirstName - userLN := u.One().LastName - userOrg := u.One().Organization - userDesc := u.One().Description - - _, err = auth.UpdateUser(userUUID, userFN, userLN, userOrg, userDesc, userName, userProjects, userEmail, userSRoles, modified, false, refStr) - - if err != nil { - - // In case of invalid project or role in post body - if err.Error() == "not found" { - err := APIErrorNotFound("User") - respondErr(w, err) - return - } - - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // Write response - respondOK(w, []byte("{}")) - -} - -// ProjectUserAdd (POST) adds a user to the respective project -func ProjectUserAdd(w http.ResponseWriter, r *http.Request) { - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - urlUser := urlVars["user"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - refProjUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - refRoles := gorillaContext.Get(r, "auth_roles").([]string) - - projName := projects.GetNameByUUID(refProjUUID, refStr) - - u, err := auth.FindUsers("", "", urlUser, true, refStr) - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("User") - respondErr(w, err) - return - } - - err := APIErrQueryDatastore() - respondErr(w, err) - return - } - - // Read POST JSON body - body, err := ioutil.ReadAll(r.Body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } 
- - data := auth.ProjectRoles{} - - err = json.Unmarshal(body, &data) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // check if the user is already a user of the project - found := false - for _, p := range u.One().Projects { - if p.Project == projName { - found = true - break - } - } - - if found { - err := APIErrorGenericConflict("User is already a member of the project") - respondErr(w, err) - return - } - - // Get Result Object - userUUID := u.One().UUID - modified := time.Now().UTC() - userEmail := u.One().Email - userSRoles := u.One().ServiceRoles - userName := u.One().Name - userProjects := u.One().Projects - userFN := u.One().FirstName - userLN := u.One().LastName - userOrg := u.One().Organization - userDesc := u.One().Description - - userProjects = append(userProjects, auth.ProjectRoles{ - Project: projName, - Roles: data.Roles, - Subs: data.Subs, - Topics: data.Topics, - }) - - _, err = auth.UpdateUser(userUUID, userFN, userLN, userOrg, userDesc, userName, userProjects, userEmail, userSRoles, modified, false, refStr) - - if err != nil { - - // In case of invalid project or role in post body - if err.Error() == "not found" { - err := APIErrorNotFound("User") - respondErr(w, err) - return - } - - if strings.HasPrefix(err.Error(), "invalid") { - err := APIErrorInvalidData(err.Error()) - respondErr(w, err) - return - } - - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // Write response - privileged := auth.IsServiceAdmin(refRoles) - fmt.Println(privileged) - results, err := auth.FindUsers(refProjUUID, "", urlUser, privileged, refStr) - - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("User") - respondErr(w, err) - return - } - - err := APIErrQueryDatastore() - respondErr(w, err) - return - } - - res := results.One() - - // Output result to JSON - resJSON, err := res.ExportJSON() - - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - 
return - } - - // Write response - respondOK(w, []byte(resJSON)) -} - -// UserListOne (GET) one user -func UserListOne(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - urlUser := urlVars["user"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Get Results Object - results, err := auth.FindUsers("", "", urlUser, true, refStr) - - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("User") - respondErr(w, err) - return - } - - err := APIErrQueryDatastore() - respondErr(w, err) - return - } - - res := results.One() - - // Output result to JSON - resJSON, err := res.ExportJSON() - - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -func UserListByUUID(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Get Results Object - result, err := auth.GetUserByUUID(urlVars["uuid"], refStr) - - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("User") - respondErr(w, err) - return - } - - if err.Error() == "multiple uuids" { - err := APIErrGenericInternal("Multiple users found with the same uuid") - respondErr(w, err) - return - } - - err := APIErrQueryDatastore() - respondErr(w, err) - return - } - - // Output result to JSON - resJSON, 
err := result.ExportJSON() - - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) -} - -// ProjectListUsers (GET) all users belonging to a project -func ProjectListUsers(w http.ResponseWriter, r *http.Request) { - - var err error - var pageSize int - var paginatedUsers auth.PaginatedUsers - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - refRoles := gorillaContext.Get(r, "auth_roles").([]string) - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - // Grab url path variables - urlValues := r.URL.Query() - pageToken := urlValues.Get("pageToken") - strPageSize := urlValues.Get("pageSize") - - if strPageSize != "" { - if pageSize, err = strconv.Atoi(strPageSize); err != nil { - log.Errorf("Pagesize %v produced an error while being converted to int: %v", strPageSize, err.Error()) - err := APIErrorInvalidData("Invalid page size") - respondErr(w, err) - return - } - } - - // check that user is indeed a service admin in order to be priviledged to see full user info - priviledged := auth.IsServiceAdmin(refRoles) - - // Get Results Object - call is always priviledged because this handler is only accessible by service admins - if paginatedUsers, err = auth.PaginatedFindUsers(pageToken, int32(pageSize), projectUUID, priviledged, refStr); err != nil { - err := APIErrorInvalidData("Invalid page token") - respondErr(w, err) - return - } - - // Output result to JSON - resJSON, err := paginatedUsers.ExportJSON() - - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// UserListAll (GET) all users - or 
users belonging to a project -func UserListAll(w http.ResponseWriter, r *http.Request) { - - var err error - var pageSize int - var paginatedUsers auth.PaginatedUsers - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - refRoles := gorillaContext.Get(r, "auth_roles").([]string) - - // Grab url path variables - urlValues := r.URL.Query() - pageToken := urlValues.Get("pageToken") - strPageSize := urlValues.Get("pageSize") - projectName := urlValues.Get("project") - projectUUID := "" - - if projectName != "" { - projectUUID = projects.GetUUIDByName(projectName, refStr) - if projectUUID == "" { - err := APIErrorNotFound("ProjectUUID") - respondErr(w, err) - return - } - } - - if strPageSize != "" { - if pageSize, err = strconv.Atoi(strPageSize); err != nil { - log.Errorf("Pagesize %v produced an error while being converted to int: %v", strPageSize, err.Error()) - err := APIErrorInvalidData("Invalid page size") - respondErr(w, err) - return - } - } - - // check that user is indeed a service admin in order to be priviledged to see full user info - priviledged := auth.IsServiceAdmin(refRoles) - - // Get Results Object - call is always priviledged because this handler is only accessible by service admins - if paginatedUsers, err = auth.PaginatedFindUsers(pageToken, int32(pageSize), projectUUID, priviledged, refStr); err != nil { - err := APIErrorInvalidData("Invalid page token") - respondErr(w, err) - return - } - - // Output result to JSON - resJSON, err := paginatedUsers.ExportJSON() - - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// UserDelete (DEL) deletes an existing user -func UserDelete(w 
http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - // Grab url path variables - urlVars := mux.Vars(r) - urlUser := urlVars["user"] - - userUUID := auth.GetUUIDByName(urlUser, refStr) - - err := auth.RemoveUser(userUUID, refStr) - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("User") - respondErr(w, err) - return - } - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // Write empty response if anything ok - respondOK(w, output) - -} - -// RegisterUser(POST) registers a new user -func RegisterUser(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Read POST JSON body - body, err := ioutil.ReadAll(r.Body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // Parse pull options - requestBody := auth.UserRegistration{} - err = json.Unmarshal(body, &requestBody) - if err != nil { - err := APIErrorInvalidArgument("User") - respondErr(w, err) - return - } - - // check if a user with that name already exists - if auth.ExistsWithName(requestBody.Name, refStr) { - err := APIErrorConflict("User") - respondErr(w, err) - return - } - - uuid := uuid.NewV4().String() - registered := time.Now().UTC().Format("2006-01-02T15:04:05Z") - tkn, err := auth.GenToken() - if err != nil { - err := APIErrGenericInternal("") - respondErr(w, err) 
- return - } - - ur, err := auth.RegisterUser(uuid, requestBody.Name, requestBody.FirstName, requestBody.LastName, requestBody.Email, - requestBody.Organization, requestBody.Description, registered, tkn, auth.PendingRegistrationStatus, refStr) - - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - output, err = json.MarshalIndent(ur, "", " ") - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - respondOK(w, output) -} - -// AcceptUserRegister (POST) accepts a user registration and creates the respective user -func AcceptRegisterUser(w http.ResponseWriter, r *http.Request) { - - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - regUUID := urlVars["uuid"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) - - ru, err := auth.FindUserRegistration(regUUID, auth.PendingRegistrationStatus, refStr) - if err != nil { - - if err.Error() == "not found" { - err := APIErrorNotFound("User registration") - respondErr(w, err) - return - } - - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - userUUID := uuid.NewV4().String() // generate a new userUUID to attach to the new project - token, err := auth.GenToken() // generate a new user token - created := time.Now().UTC() - // Get Result Object - res, err := auth.CreateUser(userUUID, ru.Name, ru.FirstName, ru.LastName, ru.Organization, ru.Description, - []auth.ProjectRoles{}, token, ru.Email, []string{}, created, refUserUUID, refStr) - - if err != nil { - if err.Error() == "exists" { - err := APIErrorConflict("User") - respondErr(w, err) - return - } - - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // update the registration - err 
= auth.UpdateUserRegistration(regUUID, auth.AcceptedRegistrationStatus, refUserUUID, created, refStr) - if err != nil { - log.Errorf("Could not update registration, %v", err.Error()) - } - - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - respondOK(w, []byte(resJSON)) -} - -func DeclineRegisterUser(w http.ResponseWriter, r *http.Request) { - - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - regUUID := urlVars["uuid"] - refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - _, err := auth.FindUserRegistration(regUUID, auth.PendingRegistrationStatus, refStr) - if err != nil { - - if err.Error() == "not found" { - err := APIErrorNotFound("User registration") - respondErr(w, err) - return - } - - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - err = auth.UpdateUserRegistration(regUUID, auth.DeclinedRegistrationStatus, refUserUUID, time.Now().UTC(), refStr) - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - respondOK(w, []byte("{}")) - -} - -// ListOneRegistration(GET) retrieves information for a specific registration based on the provided activation token -func ListOneRegistration(w http.ResponseWriter, r *http.Request) { - - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - regUUID := urlVars["uuid"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - ur, err := auth.FindUserRegistration(regUUID, "", refStr) - if err != nil { - - if err.Error() 
== "not found" { - err := APIErrorNotFound("User registration") - respondErr(w, err) - return - } - - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - urb, err := json.MarshalIndent(ur, "", " ") - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - respondOK(w, urb) -} - -// ListAllRegistrations(GET) retrieves information about all the registrations in the service -func ListAllRegistrations(w http.ResponseWriter, r *http.Request) { - - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - name := r.URL.Query().Get("name") - status := r.URL.Query().Get("status") - email := r.URL.Query().Get("email") - org := r.URL.Query().Get("organization") - activationToken := r.URL.Query().Get("activation_token") - - ur, err := auth.FindUserRegistrations(status, activationToken, name, email, org, refStr) - if err != nil { - - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - urb, err := json.MarshalIndent(ur, "", " ") - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - respondOK(w, urb) -} - -// SubAck (GET) one subscription -func SubAck(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - // Read POST JSON body - body, err := ioutil.ReadAll(r.Body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - 
return - } - - // Parse pull options - postBody, err := subscriptions.GetAckFromJSON(body) - if err != nil { - err := APIErrorInvalidData("Invalid ack parameter") - respondErr(w, err) - return - } - - // Get urlParams - projectName := urlVars["project"] - subName := urlVars["subscription"] - - // Check if sub exists - - cur_sub, err := subscriptions.Find(projectUUID, "", subName, "", 0, refStr) - if err != nil { - err := APIErrHandlingAcknowledgement() - respondErr(w, err) - return - } - if len(cur_sub.Subscriptions) == 0 { - err := APIErrorNotFound("Subscription") - respondErr(w, err) - return - } - - // Get list of AckIDs - if postBody.IDs == nil { - err := APIErrorInvalidData("Invalid ack id") - respondErr(w, err) - return - } - - // Check if each AckID is valid - for _, ackID := range postBody.IDs { - if validAckID(projectName, subName, ackID) == false { - err := APIErrorInvalidData("Invalid ack id") - respondErr(w, err) - return - } - } - - // Get Max ackID - maxAckID, err := subscriptions.GetMaxAckID(postBody.IDs) - if err != nil { - err := APIErrHandlingAcknowledgement() - respondErr(w, err) - return - } - // Extract offset from max ackID - off, err := subscriptions.GetOffsetFromAckID(maxAckID) - - if err != nil { - err := APIErrorInvalidData("Invalid ack id") - respondErr(w, err) - return - } - - zSec := "2006-01-02T15:04:05Z" - t := time.Now().UTC() - ts := t.Format(zSec) - - err = refStr.UpdateSubOffsetAck(projectUUID, urlVars["subscription"], int64(off+1), ts) - if err != nil { - - if err.Error() == "ack timeout" { - err := APIErrorTimeout(err.Error()) - respondErr(w, err) - return - } - - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // Output result to JSON - resJSON := "{}" - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// SubListOne (GET) one subscription -func SubListOne(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header 
to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - results, err := subscriptions.Find(projectUUID, "", urlVars["subscription"], "", 0, refStr) - - if err != nil { - err := APIErrGenericBackend() - respondErr(w, err) - return - } - - // If not found - if results.Empty() { - err := APIErrorNotFound("Subscription") - respondErr(w, err) - return - } - - // if its a push enabled sub and it has a verified endpoint - // call the push server to find its real time push status - if results.Subscriptions[0].PushCfg != (subscriptions.PushConfig{}) { - if results.Subscriptions[0].PushCfg.Verified { - apsc := gorillaContext.Get(r, "apsc").(push.Client) - results.Subscriptions[0].PushStatus = apsc.SubscriptionStatus(context.TODO(), results.Subscriptions[0].FullName).Result() - } - } - - // Output result to JSON - resJSON, err := results.Subscriptions[0].ExportJSON() - - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// SubSetOffset (PUT) sets subscriptions current offset -func SubSetOffset(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - // Get Result Object - urlSub := urlVars["subscription"] - - // Read POST JSON body - body, err := ioutil.ReadAll(r.Body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // Parse pull options - 
postBody, err := subscriptions.GetSetOffsetJSON(body) - if err != nil { - err := APIErrorInvalidArgument("Offset") - respondErr(w, err) - log.Error(string(body[:])) - return - } - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - refBrk := gorillaContext.Get(r, "brk").(brokers.Broker) - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - // Find Subscription - results, err := subscriptions.Find(projectUUID, "", urlVars["subscription"], "", 0, refStr) - - if err != nil { - err := APIErrGenericBackend() - respondErr(w, err) - return - } - - // If not found - if results.Empty() { - err := APIErrorNotFound("Subscription") - respondErr(w, err) - return - } - brk_topic := projectUUID + "." + results.Subscriptions[0].Topic - min_offset := refBrk.GetMinOffset(brk_topic) - max_offset := refBrk.GetMaxOffset(brk_topic) - - //Check if given offset is between min max - if postBody.Offset < min_offset || postBody.Offset > max_offset { - err := APIErrorInvalidData("Offset out of bounds") - respondErr(w, err) - log.Error(string(body[:])) - } - - // Get subscription offsets - - refStr.UpdateSubOffset(projectUUID, urlSub, postBody.Offset) - - respondOK(w, output) - -} - -// SubGetOffsets (GET) gets offset metrics from a subscription -func SubGetOffsets(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - refBrk := gorillaContext.Get(r, "brk").(brokers.Broker) - - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - results, err := subscriptions.Find(projectUUID, "", urlVars["subscription"], "", 0, 
refStr) - - if err != nil { - err := APIErrGenericBackend() - respondErr(w, err) - return - } - - // If not found - if results.Empty() { - err := APIErrorNotFound("Subscription") - respondErr(w, err) - return - } - - // Output result to JSON - brk_topic := projectUUID + "." + results.Subscriptions[0].Topic - cur_offset := results.Subscriptions[0].Offset - min_offset := refBrk.GetMinOffset(brk_topic) - max_offset := refBrk.GetMaxOffset(brk_topic) - - // Create offset struct - offResult := subscriptions.Offsets{Current: cur_offset, Min: min_offset, Max: max_offset} - resJSON, err := offResult.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -func SubTimeToOffset(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - refBrk := gorillaContext.Get(r, "brk").(brokers.Broker) - - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - results, err := subscriptions.Find(projectUUID, "", urlVars["subscription"], "", 0, refStr) - - if err != nil { - err := APIErrGenericBackend() - respondErr(w, err) - return - } - - // If not found - if results.Empty() { - err := APIErrorNotFound("Subscription") - respondErr(w, err) - return - } - - t, err := time.Parse("2006-01-02T15:04:05.000Z", r.URL.Query().Get("time")) - if err != nil { - err := APIErrorInvalidData("Time is not in valid Zulu format.") - respondErr(w, err) - return - } - - // Output result to JSON - brk_topic := projectUUID + "." 
+ results.Subscriptions[0].Topic - off, err := refBrk.TimeToOffset(brk_topic, t.Local()) - - if err != nil { - log.Errorf(err.Error()) - err := APIErrGenericBackend() - respondErr(w, err) - return - } - - if off < 0 { - err := APIErrorGenericConflict("Timestamp is out of bounds for the subscription's topic/partition") - respondErr(w, err) - return - } - - topicOffset := brokers.TopicOffset{Offset: off} - output, err = json.Marshal(topicOffset) - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - respondOK(w, output) -} - -// TopicDelete (DEL) deletes an existing topic -func TopicDelete(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - refBrk := gorillaContext.Get(r, "brk").(brokers.Broker) - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - // Get Result Object - - err := topics.RemoveTopic(projectUUID, urlVars["topic"], refStr) - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("Topic") - respondErr(w, err) - return - } - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - fullTopic := projectUUID + "." 
+ urlVars["topic"] - err = refBrk.DeleteTopic(fullTopic) - if err != nil { - log.Errorf("Couldn't delete topic %v from broker, %v", fullTopic, err.Error()) - } - - // Write empty response if anything ok - respondOK(w, output) - -} - -// SubDelete (DEL) deletes an existing subscription -func SubDelete(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - // Get Result Object - results, err := subscriptions.Find(projectUUID, "", urlVars["subscription"], "", 0, refStr) - if err != nil { - err := APIErrGenericBackend() - respondErr(w, err) - return - } - - // If not found - if results.Empty() { - err := APIErrorNotFound("Subscription") - respondErr(w, err) - return - } - - err = subscriptions.RemoveSub(projectUUID, urlVars["subscription"], refStr) - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("Subscription") - respondErr(w, err) - return - } - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // if it is a push sub and it is also has a verified push endpoint, deactivate it - if results.Subscriptions[0].PushCfg != (subscriptions.PushConfig{}) { - if results.Subscriptions[0].PushCfg.Verified { - pr := make(map[string]string) - apsc := gorillaContext.Get(r, "apsc").(push.Client) - pr["message"] = apsc.DeactivateSubscription(context.TODO(), results.Subscriptions[0].FullName).Result() - b, _ := json.Marshal(pr) - output = b - } - } - respondOK(w, output) -} - -// TopicModACL (PUT) modifies the ACL -func TopicModACL(w http.ResponseWriter, r *http.Request) { - - // Init output - output 
:= []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - // Get Result Object - urlTopic := urlVars["topic"] - - // Read POST JSON body - body, err := ioutil.ReadAll(r.Body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // Parse pull options - postBody, err := auth.GetACLFromJSON(body) - if err != nil { - err := APIErrorInvalidArgument("Topic ACL") - respondErr(w, err) - log.Error(string(body[:])) - return - } - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - // check if user list contain valid users for the given project - _, err = auth.AreValidUsers(projectUUID, postBody.AuthUsers, refStr) - if err != nil { - err := APIErrorRoot{Body: APIErrorBody{Code: http.StatusNotFound, Message: err.Error(), Status: "NOT_FOUND"}} - respondErr(w, err) - return - } - - err = auth.ModACL(projectUUID, "topics", urlTopic, postBody.AuthUsers, refStr) - - if err != nil { - - if err.Error() == "not found" { - err := APIErrorNotFound("Topic") - respondErr(w, err) - return - } - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - respondOK(w, output) - -} - -// SubModACL (POST) modifies the ACL -func SubModACL(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - // Get Result Object - urlSub := urlVars["subscription"] - - // Read POST JSON body - body, err := ioutil.ReadAll(r.Body) 
- if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // Parse pull options - postBody, err := auth.GetACLFromJSON(body) - if err != nil { - err := APIErrorInvalidArgument("Subscription ACL") - respondErr(w, err) - log.Error(string(body[:])) - return - } - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - // check if user list contain valid users for the given project - _, err = auth.AreValidUsers(projectUUID, postBody.AuthUsers, refStr) - if err != nil { - err := APIErrorRoot{Body: APIErrorBody{Code: http.StatusNotFound, Message: err.Error(), Status: "NOT_FOUND"}} - respondErr(w, err) - return - } - - err = auth.ModACL(projectUUID, "subscriptions", urlSub, postBody.AuthUsers, refStr) - - if err != nil { - - if err.Error() == "not found" { - err := APIErrorNotFound("Subscription") - respondErr(w, err) - return - } - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - respondOK(w, output) - -} - -// SubModPush (POST) modifies the push configuration -func SubModPush(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - subName := urlVars["subscription"] - - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Read POST JSON body - body, err := ioutil.ReadAll(r.Body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // Parse pull options - postBody, err := 
subscriptions.GetFromJSON(body) - if err != nil { - APIErrorInvalidArgument("Subscription") - log.Error(string(body[:])) - return - } - - pushEnd := "" - rPolicy := "" - rPeriod := 0 - vhash := "" - verified := false - maxMessages := int64(0) - pushWorker := auth.User{} - pwToken := gorillaContext.Get(r, "push_worker_token").(string) - - if postBody.PushCfg != (subscriptions.PushConfig{}) { - - pushEnabled := gorillaContext.Get(r, "push_enabled").(bool) - - // check the state of the push functionality - if !pushEnabled { - err := APIErrorPushConflict() - respondErr(w, err) - return - } - - pushWorker, err = auth.GetPushWorker(pwToken, refStr) - if err != nil { - err := APIErrInternalPush() - respondErr(w, err) - return - } - - pushEnd = postBody.PushCfg.Pend - // Check if push endpoint is not a valid https:// endpoint - if !(isValidHTTPS(pushEnd)) { - err := APIErrorInvalidData("Push endpoint should be addressed by a valid https url") - respondErr(w, err) - return - } - rPolicy = postBody.PushCfg.RetPol.PolicyType - rPeriod = postBody.PushCfg.RetPol.Period - maxMessages = postBody.PushCfg.MaxMessages - - if rPolicy == "" { - rPolicy = subscriptions.LinearRetryPolicyType - } - if rPeriod <= 0 { - rPeriod = 3000 - } - - if !subscriptions.IsRetryPolicySupported(rPolicy) { - err := APIErrorInvalidData(subscriptions.UnSupportedRetryPolicyError) - respondErr(w, err) - return - } - } - - // Get Result Object - res, err := subscriptions.Find(projectUUID, "", subName, "", 0, refStr) - - if err != nil { - err := APIErrGenericBackend() - respondErr(w, err) - return - } - - if res.Empty() { - err := APIErrorNotFound("Subscription") - respondErr(w, err) - return - } - - existingSub := res.Subscriptions[0] - - if maxMessages == 0 { - if existingSub.PushCfg.MaxMessages == 0 { - maxMessages = int64(1) - } else { - maxMessages = existingSub.PushCfg.MaxMessages - } - } - - // if the request wants to transform a pull subscription to a push one - // we need to begin the verification 
process - if postBody.PushCfg != (subscriptions.PushConfig{}) { - - // if the endpoint in not the same with the old one, we need to verify it again - if postBody.PushCfg.Pend != existingSub.PushCfg.Pend { - vhash, err = auth.GenToken() - if err != nil { - log.Errorf("Could not generate verification hash for subscription %v, %v", urlVars["subscription"], err.Error()) - err := APIErrGenericInternal("Could not generate verification hash") - respondErr(w, err) - return - } - // else keep the already existing data - } else { - vhash = existingSub.PushCfg.VerificationHash - verified = existingSub.PushCfg.Verified - } - } - - err = subscriptions.ModSubPush(projectUUID, subName, pushEnd, maxMessages, rPolicy, rPeriod, vhash, verified, refStr) - - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("Subscription") - respondErr(w, err) - return - } - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // if this is an deactivate request, try to retrieve the push worker in order to remove him from the sub's acl - if existingSub.PushCfg != (subscriptions.PushConfig{}) && postBody.PushCfg == (subscriptions.PushConfig{}) { - pushWorker, _ = auth.GetPushWorker(pwToken, refStr) - } - - // if the sub, was push enabled before the update and the endpoint was verified - // we need to deactivate it on the push server - if existingSub.PushCfg != (subscriptions.PushConfig{}) { - if existingSub.PushCfg.Verified { - // deactivate the subscription on the push backend - apsc := gorillaContext.Get(r, "apsc").(push.Client) - apsc.DeactivateSubscription(context.TODO(), existingSub.FullName) - - // remove the push worker user from the sub's acl - err = auth.RemoveFromACL(projectUUID, "subscriptions", existingSub.Name, []string{pushWorker.Name}, refStr) - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - } - } - // if the update on push configuration is not intended to stop the push functionality - 
// activate the subscription with the new values - if postBody.PushCfg != (subscriptions.PushConfig{}) { - - // reactivate only if the push endpoint hasn't changed and it wes already verified - // otherwise we need to verify the ownership again before wee activate it - if postBody.PushCfg.Pend == existingSub.PushCfg.Pend && existingSub.PushCfg.Verified { - - // activate the subscription on the push backend - apsc := gorillaContext.Get(r, "apsc").(push.Client) - apsc.ActivateSubscription(context.TODO(), existingSub.FullName, existingSub.FullTopic, - pushEnd, rPolicy, uint32(rPeriod), maxMessages) - - // modify the sub's acl with the push worker's uuid - err = auth.AppendToACL(projectUUID, "subscriptions", existingSub.Name, []string{pushWorker.Name}, refStr) - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // link the sub's project with the push worker - err = auth.AppendToUserProjects(pushWorker.UUID, projectUUID, refStr) - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - } - } - - // Write empty response if everything's ok - respondOK(w, output) -} - -// SubVerifyPushEndpoint (POST) verifies the ownership of a push endpoint registered in a push enabled subscription -func SubVerifyPushEndpoint(w http.ResponseWriter, r *http.Request) { - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - subName := urlVars["subscription"] - - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - pwToken := gorillaContext.Get(r, "push_worker_token").(string) - - pushEnabled := gorillaContext.Get(r, "push_enabled").(bool) - - pushW := auth.User{} - 
- // check the state of the push functionality - if !pushEnabled { - err := APIErrorPushConflict() - respondErr(w, err) - return - } - - pushW, err := auth.GetPushWorker(pwToken, refStr) - if err != nil { - err := APIErrInternalPush() - respondErr(w, err) - return - } - - // Get Result Object - res, err := subscriptions.Find(projectUUID, "", subName, "", 0, refStr) - - if err != nil { - err := APIErrGenericBackend() - respondErr(w, err) - return - } - - if res.Empty() { - err := APIErrorNotFound("Subscription") - respondErr(w, err) - return - } - - sub := res.Subscriptions[0] - - // check that the subscription is push enabled - if sub.PushCfg == (subscriptions.PushConfig{}) { - err := APIErrorGenericConflict("Subscription is not in push mode") - respondErr(w, err) - return - } - - // check that the endpoint isn't already verified - if sub.PushCfg.Verified { - err := APIErrorGenericConflict("Push endpoint is already verified") - respondErr(w, err) - return - } - - // verify the push endpoint - c := new(http.Client) - err = subscriptions.VerifyPushEndpoint(sub, c, refStr) - if err != nil { - err := APIErrPushVerification(err.Error()) - respondErr(w, err) - return - } - - // activate the subscription on the push backend - apsc := gorillaContext.Get(r, "apsc").(push.Client) - apsc.ActivateSubscription(context.TODO(), sub.FullName, sub.FullTopic, sub.PushCfg.Pend, - sub.PushCfg.RetPol.PolicyType, uint32(sub.PushCfg.RetPol.Period), sub.PushCfg.MaxMessages) - - // modify the sub's acl with the push worker's uuid - err = auth.AppendToACL(projectUUID, "subscriptions", sub.Name, []string{pushW.Name}, refStr) - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // link the sub's project with the push worker - err = auth.AppendToUserProjects(pushW.UUID, projectUUID, refStr) - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - respondOK(w, []byte{}) -} - -// SubModAck (POST) modifies 
the Ack deadline of the subscription -func SubModAck(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - // Get Result Object - urlSub := urlVars["subscription"] - - // Read POST JSON body - body, err := ioutil.ReadAll(r.Body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // Parse pull options - postBody, err := subscriptions.GetAckDeadlineFromJSON(body) - if err != nil { - err := APIErrorInvalidArgument("ackDeadlineSeconds(needs value between 0 and 600)") - respondErr(w, err) - log.Error(string(body[:])) - return - } - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - err = subscriptions.ModAck(projectUUID, urlSub, postBody.AckDeadline, refStr) - - if err != nil { - if err.Error() == "wrong value" { - respondErr(w, APIErrorInvalidArgument("ackDeadlineSeconds(needs value between 0 and 600)")) - return - } - if err.Error() == "not found" { - err := APIErrorNotFound("Subscription") - respondErr(w, err) - return - } - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - respondOK(w, output) - -} - -// SubCreate (PUT) creates a new subscription -func SubCreate(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - - // Grab context references - refStr := gorillaContext.Get(r, 
"str").(stores.Store) - refBrk := gorillaContext.Get(r, "brk").(brokers.Broker) - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - // Read POST JSON body - body, err := ioutil.ReadAll(r.Body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // Parse pull options - postBody, err := subscriptions.GetFromJSON(body) - if err != nil { - err := APIErrorInvalidArgument("Subscription") - respondErr(w, err) - log.Error(string(body[:])) - return - } - - tProject, tName, err := subscriptions.ExtractFullTopicRef(postBody.FullTopic) - - if err != nil { - err := APIErrorInvalidName("Topic") - respondErr(w, err) - return - } - - if topics.HasTopic(projectUUID, tName, refStr) == false { - err := APIErrorNotFound("Topic") - respondErr(w, err) - return - } - - // Get current topic offset - tProjectUUID := projects.GetUUIDByName(tProject, refStr) - fullTopic := tProjectUUID + "." + tName - curOff := refBrk.GetMaxOffset(fullTopic) - - pushEnd := "" - rPolicy := "" - rPeriod := 0 - maxMessages := int64(1) - - //pushWorker := auth.User{} - verifyHash := "" - - if postBody.PushCfg != (subscriptions.PushConfig{}) { - - // check the state of the push functionality - pwToken := gorillaContext.Get(r, "push_worker_token").(string) - pushEnabled := gorillaContext.Get(r, "push_enabled").(bool) - - if !pushEnabled { - err := APIErrorPushConflict() - respondErr(w, err) - return - } - - _, err = auth.GetPushWorker(pwToken, refStr) - if err != nil { - err := APIErrInternalPush() - respondErr(w, err) - return - } - - pushEnd = postBody.PushCfg.Pend - // Check if push endpoint is not a valid https:// endpoint - if !(isValidHTTPS(pushEnd)) { - err := APIErrorInvalidData("Push endpoint should be addressed by a valid https url") - respondErr(w, err) - return - } - rPolicy = postBody.PushCfg.RetPol.PolicyType - rPeriod = postBody.PushCfg.RetPol.Period - maxMessages = postBody.PushCfg.MaxMessages - - if rPolicy == "" { - rPolicy = 
subscriptions.LinearRetryPolicyType - } - - if maxMessages == 0 { - maxMessages = int64(1) - } - - if rPeriod <= 0 { - rPeriod = 3000 - } - - if !subscriptions.IsRetryPolicySupported(rPolicy) { - err := APIErrorInvalidData(subscriptions.UnSupportedRetryPolicyError) - respondErr(w, err) - return - } - - verifyHash, err = auth.GenToken() - if err != nil { - log.Errorf("Could not generate verification hash for subscription %v, %v", urlVars["subscription"], err.Error()) - err := APIErrGenericInternal("Could not generate verification hash") - respondErr(w, err) - return - } - - } - - // Get Result Object - res, err := subscriptions.CreateSub(projectUUID, urlVars["subscription"], tName, pushEnd, curOff, maxMessages, postBody.Ack, rPolicy, rPeriod, verifyHash, false, refStr) - - if err != nil { - if err.Error() == "exists" { - err := APIErrorConflict("Subscription") - respondErr(w, err) - return - } - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// TopicCreate (PUT) creates a new topic -func TopicCreate(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - postBody := map[string]string{} - schemaUUID := "" - - // check if there's a request body provided before trying to decode - if r.Body != nil { - - b, err := ioutil.ReadAll(r.Body) - - if err != nil { - err := APIErrorInvalidRequestBody() - 
respondErr(w, err) - return - } - defer r.Body.Close() - - if len(b) > 0 { - err = json.Unmarshal(b, &postBody) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - schemaRef := postBody["schema"] - - // if there was a schema name provided, check its existence - if schemaRef != "" { - _, schemaName, err := schemas.ExtractSchema(schemaRef) - if err != nil { - err := APIErrorInvalidData(err.Error()) - respondErr(w, err) - return - } - sl, err := schemas.Find(projectUUID, "", schemaName, refStr) - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - if sl.Empty() { - err := APIErrorNotFound("Schema") - respondErr(w, err) - return - } - - schemaUUID = sl.Schemas[0].UUID - } - } - } - // Get Result Object - res, err := topics.CreateTopic(projectUUID, urlVars["topic"], schemaUUID, refStr) - if err != nil { - if err.Error() == "exists" { - err := APIErrorConflict("Topic") - respondErr(w, err) - return - } - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - } - - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// ProjectMetrics (GET) metrics for one project (number of topics) -func ProjectMetrics(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - //refRoles := gorillaContext.Get(r, "auth_roles").([]string) - //refUser := gorillaContext.Get(r, "auth_user").(string) - //refAuthResource := gorillaContext.Get(r, "auth_resource").(bool) - - 
urlProject := urlVars["project"] - - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - // Check Authorization per topic - // - if enabled in config - // - if user has only publisher role - - numTopics := int64(0) - numSubs := int64(0) - - numTopics2, err2 := metrics.GetProjectTopics(projectUUID, refStr) - if err2 != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - numTopics = numTopics2 - numSubs2, err2 := metrics.GetProjectSubs(projectUUID, refStr) - if err2 != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - numSubs = numSubs2 - - var timePoints []metrics.Timepoint - var err error - - if timePoints, err = metrics.GetDailyProjectMsgCount(projectUUID, refStr); err != nil { - err := APIErrGenericBackend() - respondErr(w, err) - return - } - - m1 := metrics.NewProjectTopics(urlProject, numTopics, metrics.GetTimeNowZulu()) - m2 := metrics.NewProjectSubs(urlProject, numSubs, metrics.GetTimeNowZulu()) - res := metrics.NewMetricList(m1) - res.Metrics = append(res.Metrics, m2) - - // ProjectUUID User topics aggregation - m3, err := metrics.AggrProjectUserTopics(projectUUID, refStr) - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - for _, item := range m3.Metrics { - res.Metrics = append(res.Metrics, item) - } - - // ProjectUUID User subscriptions aggregation - m4, err := metrics.AggrProjectUserSubs(projectUUID, refStr) - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - for _, item := range m4.Metrics { - res.Metrics = append(res.Metrics, item) - } - - m5 := metrics.NewDailyProjectMsgCount(urlProject, timePoints) - res.Metrics = append(res.Metrics, m5) - - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// TopicMetrics (GET) metrics for one topic -func TopicMetrics(w 
http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - refRoles := gorillaContext.Get(r, "auth_roles").([]string) - refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) - refAuthResource := gorillaContext.Get(r, "auth_resource").(bool) - - urlTopic := urlVars["topic"] - - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - // Check Authorization per topic - // - if enabled in config - // - if user has only publisher role - - if refAuthResource && auth.IsPublisher(refRoles) { - - if auth.PerResource(projectUUID, "topics", urlTopic, refUserUUID, refStr) == false { - err := APIErrorForbidden() - respondErr(w, err) - return - } - } - - // Number of bytes and number of messages - resultsMsg, err := topics.FindMetric(projectUUID, urlTopic, refStr) - - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("Topic") - respondErr(w, err) - return - } - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - numMsg := resultsMsg.MsgNum - numBytes := resultsMsg.TotalBytes - - numSubs := int64(0) - numSubs, err = metrics.GetProjectSubsByTopic(projectUUID, urlTopic, refStr) - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("Topic") - respondErr(w, err) - return - } - err := APIErrGenericBackend() - respondErr(w, err) - return - } - - var timePoints []metrics.Timepoint - if timePoints, err = metrics.GetDailyTopicMsgCount(projectUUID, urlTopic, refStr); err != nil { - err := APIErrGenericBackend() - respondErr(w, err) - return - } - - m1 := metrics.NewTopicSubs(urlTopic, numSubs, metrics.GetTimeNowZulu()) - res := 
metrics.NewMetricList(m1) - - m2 := metrics.NewTopicMsgs(urlTopic, numMsg, metrics.GetTimeNowZulu()) - m3 := metrics.NewTopicBytes(urlTopic, numBytes, metrics.GetTimeNowZulu()) - m4 := metrics.NewDailyTopicMsgCount(urlTopic, timePoints) - m5 := metrics.NewTopicRate(urlTopic, resultsMsg.PublishRate, resultsMsg.LatestPublish.Format("2006-01-02T15:04:05Z")) - - res.Metrics = append(res.Metrics, m2, m3, m4, m5) - - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// TopicListOne (GET) one topic -func TopicListOne(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - results, err := topics.Find(projectUUID, "", urlVars["topic"], "", 0, refStr) - - if err != nil { - err := APIErrGenericBackend() - respondErr(w, err) - return - } - - // If not found - if results.Empty() { - err := APIErrorNotFound("Topic") - respondErr(w, err) - return - } - - res := results.Topics[0] - - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// ListSubsByTopic (GET) lists all subscriptions associated with the given topic -func ListSubsByTopic(w http.ResponseWriter, r *http.Request) { - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", 
fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - results, err := topics.Find(projectUUID, "", urlVars["topic"], "", 0, refStr) - - if err != nil { - err := APIErrGenericBackend() - respondErr(w, err) - return - } - - // If not found - if results.Empty() { - err := APIErrorNotFound("Topic") - respondErr(w, err) - return - } - - subs, err := subscriptions.FindByTopic(projectUUID, results.Topics[0].Name, refStr) - if err != nil { - err := APIErrGenericBackend() - respondErr(w, err) - return - - } - - resJSON, err := json.Marshal(subs) - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - respondOK(w, resJSON) -} - -// TopicACL (GET) one topic's authorized users -func TopicACL(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - urlTopic := urlVars["topic"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - res, err := auth.GetACL(projectUUID, "topics", urlTopic, refStr) - - // If not found - if err != nil { - err := APIErrorNotFound("Topic") - respondErr(w, err) - return - } - - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// SubACL (GET) one topic's authorized users -func SubACL(w http.ResponseWriter, r *http.Request) { - 
- // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - urlSub := urlVars["subscription"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - res, err := auth.GetACL(projectUUID, "subscriptions", urlSub, refStr) - - // If not found - if err != nil { - err := APIErrorNotFound("Subscription") - respondErr(w, err) - return - } - - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -// SubMetrics (GET) metrics for one subscription -func SubMetrics(w http.ResponseWriter, r *http.Request) { - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab url path variables - urlVars := mux.Vars(r) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - refRoles := gorillaContext.Get(r, "auth_roles").([]string) - refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) - refAuthResource := gorillaContext.Get(r, "auth_resource").(bool) - - urlSub := urlVars["subscription"] - - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - // Check Authorization per topic - // - if enabled in config - // - if user has only publisher role - - if refAuthResource && auth.IsConsumer(refRoles) { - - if auth.PerResource(projectUUID, "subscriptions", urlSub, refUserUUID, refStr) == false { - err := 
APIErrorForbidden() - respondErr(w, err) - return - } - } - - resultMsg, err := subscriptions.FindMetric(projectUUID, urlSub, refStr) - - if err != nil { - if err.Error() == "not found" { - err := APIErrorNotFound("Subscription") - respondErr(w, err) - return - } - err := APIErrGenericBackend() - respondErr(w, err) - } - - numMsg := resultMsg.MsgNum - numBytes := resultMsg.TotalBytes - - m1 := metrics.NewSubMsgs(urlSub, numMsg, metrics.GetTimeNowZulu()) - res := metrics.NewMetricList(m1) - m2 := metrics.NewSubBytes(urlSub, numBytes, metrics.GetTimeNowZulu()) - m3 := metrics.NewSubRate(urlSub, resultMsg.ConsumeRate, resultMsg.LatestConsume.Format("2006-01-02T15:04:05Z")) - - res.Metrics = append(res.Metrics, m2, m3) - - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) - -} - -//SubListAll (GET) all subscriptions -func SubListAll(w http.ResponseWriter, r *http.Request) { - - var err error - var strPageSize string - var pageSize int - var res subscriptions.PaginatedSubscriptions - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - roles := gorillaContext.Get(r, "auth_roles").([]string) - - urlValues := r.URL.Query() - pageToken := urlValues.Get("pageToken") - strPageSize = urlValues.Get("pageSize") - - // if this route is used by a user who only has a consumer role - // return all subscriptions that he has access to - userUUID := "" - if !auth.IsProjectAdmin(roles) && !auth.IsServiceAdmin(roles) && auth.IsConsumer(roles) { - userUUID = gorillaContext.Get(r, 
"auth_user_uuid").(string) - } - - if strPageSize != "" { - if pageSize, err = strconv.Atoi(strPageSize); err != nil { - log.Errorf("Pagesize %v produced an error while being converted to int: %v", strPageSize, err.Error()) - err := APIErrorInvalidData("Invalid page size") - respondErr(w, err) - return - } - } - - if res, err = subscriptions.Find(projectUUID, userUUID, "", pageToken, int32(pageSize), refStr); err != nil { - err := APIErrorInvalidData("Invalid page token") - respondErr(w, err) - return - } - - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write Response - output = []byte(resJSON) - respondOK(w, output) - -} - -// TopicListAll (GET) all topics -func TopicListAll(w http.ResponseWriter, r *http.Request) { - - var err error - var strPageSize string - var pageSize int - var res topics.PaginatedTopics - - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - roles := gorillaContext.Get(r, "auth_roles").([]string) - - urlValues := r.URL.Query() - pageToken := urlValues.Get("pageToken") - strPageSize = urlValues.Get("pageSize") - - // if this route is used by a user who only has a publisher role - // return all topics that he has access to - userUUID := "" - if !auth.IsProjectAdmin(roles) && !auth.IsServiceAdmin(roles) && auth.IsPublisher(roles) { - userUUID = gorillaContext.Get(r, "auth_user_uuid").(string) - } - - if strPageSize != "" { - if pageSize, err = strconv.Atoi(strPageSize); err != nil { - log.Errorf("Pagesize %v produced an error while being converted to int: %v", strPageSize, err.Error()) - err := 
APIErrorInvalidData("Invalid page size") - respondErr(w, err) - return - } - } - - if res, err = topics.Find(projectUUID, userUUID, "", pageToken, int32(pageSize), refStr); err != nil { - err := APIErrorInvalidData("Invalid page token") - respondErr(w, err) - return - } - // Output result to JSON - resJSON, err := res.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write Response - output = []byte(resJSON) - respondOK(w, output) - -} - -// TopicPublish (POST) publish a new topic -func TopicPublish(w http.ResponseWriter, r *http.Request) { - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Get url path variables - urlVars := mux.Vars(r) - urlTopic := urlVars["topic"] - - // Grab context references - - refBrk := gorillaContext.Get(r, "brk").(brokers.Broker) - refStr := gorillaContext.Get(r, "str").(stores.Store) - refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) - refRoles := gorillaContext.Get(r, "auth_roles").([]string) - refAuthResource := gorillaContext.Get(r, "auth_resource").(bool) - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - results, err := topics.Find(projectUUID, "", urlVars["topic"], "", 0, refStr) - - if err != nil { - err := APIErrGenericBackend() - respondErr(w, err) - return - } - - // If not found - if results.Empty() { - err := APIErrorNotFound("Topic") - respondErr(w, err) - return - } - - res := results.Topics[0] - - // Check Authorization per topic - // - if enabled in config - // - if user has only publisher role - - if refAuthResource && auth.IsPublisher(refRoles) { - - if auth.PerResource(projectUUID, "topics", urlTopic, refUserUUID, refStr) == false { - err := APIErrorForbidden() - respondErr(w, err) - return - } - } - - 
// Read POST JSON body - body, err := ioutil.ReadAll(r.Body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // Create Message List from Post JSON - msgList, err := messages.LoadMsgListJSON(body) - if err != nil { - err := APIErrorInvalidArgument("Message") - respondErr(w, err) - log.Error(string(body[:])) - return - } - - // check if the topic has a schema associated with it - if res.Schema != "" { - - // retrieve the schema - _, schemaName, err := schemas.ExtractSchema(res.Schema) - if err != nil { - log.WithFields( - log.Fields{ - "type": "service_log", - "schema_name": res.Schema, - "topic_name": res.Name, - "error": err.Error(), - }, - ).Error("Could not extract schema name") - err := APIErrGenericInternal(schemas.GenericError) - respondErr(w, err) - return - } - - sl, err := schemas.Find(projectUUID, "", schemaName, refStr) - - if err != nil { - log.WithFields( - log.Fields{ - "type": "service_log", - "schema_name": schemaName, - "topic_name": res.Name, - "error": err.Error(), - }, - ).Error("Could not retrieve schema from the store") - err := APIErrGenericInternal(schemas.GenericError) - respondErr(w, err) - return - } - - if !sl.Empty() { - err := schemas.ValidateMessages(sl.Schemas[0], msgList) - if err != nil { - if err.Error() == "500" { - err := APIErrGenericInternal(schemas.GenericError) - respondErr(w, err) - return - } else { - err := APIErrorInvalidData(err.Error()) - respondErr(w, err) - return - } - } - } else { - log.WithFields( - log.Fields{ - "type": "service_log", - "schema_name": res.Schema, - "topic_name": res.Name, - }, - ).Error("List of schemas was empty") - err := APIErrGenericInternal(schemas.GenericError) - respondErr(w, err) - return - } - } - - // Init message ids list - msgIDs := messages.MsgIDs{IDs: []string{}} - - // For each message in message list - for _, msg := range msgList.Msgs { - // Get offset and set it as msg - fullTopic := projectUUID + "." 
+ urlTopic - - msgID, rTop, _, _, err := refBrk.Publish(fullTopic, msg) - - if err != nil { - if err.Error() == "kafka server: Message was too large, server rejected it to avoid allocation error." { - err := APIErrTooLargeMessage("Message size too large") - respondErr(w, err) - return - } - - err := APIErrGenericBackend() - respondErr(w, err) - return - } - - msg.ID = msgID - // Assertions for Succesfull Publish - if rTop != fullTopic { - err := APIErrGenericInternal("Broker reports wrong topic") - respondErr(w, err) - return - } - - // Append the MsgID of the successful published message to the msgIds list - msgIDs.IDs = append(msgIDs.IDs, msg.ID) - } - - // timestamp of the publish event - publishTime := time.Now().UTC() - - // amount of messages published - msgCount := int64(len(msgList.Msgs)) - - // increment topic number of message metric - refStr.IncrementTopicMsgNum(projectUUID, urlTopic, msgCount) - - // increment daily count of topic messages - year, month, day := publishTime.Date() - refStr.IncrementDailyTopicMsgCount(projectUUID, urlTopic, msgCount, time.Date(year, month, day, 0, 0, 0, 0, time.UTC)) - - // increment topic total bytes published - refStr.IncrementTopicBytes(projectUUID, urlTopic, msgList.TotalSize()) - - // update latest publish date for the given topic - refStr.UpdateTopicLatestPublish(projectUUID, urlTopic, publishTime) - - // count the rate of published messages per sec between the last two publish events - var dt float64 = 1 - // if its the first publish to the topic - // skip the subtraction that computes the DT between the last two publish events - if !res.LatestPublish.IsZero() { - dt = publishTime.Sub(res.LatestPublish).Seconds() - } - refStr.UpdateTopicPublishRate(projectUUID, urlTopic, float64(msgCount)/dt) - - // Export the msgIDs - resJSON, err := msgIDs.ExportJSON() - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Write response - output = []byte(resJSON) - respondOK(w, output) -} - -// 
SubPull (POST) publish a new topic -func SubPull(w http.ResponseWriter, r *http.Request) { - // Init output - output := []byte("") - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Get url path variables - urlVars := mux.Vars(r) - urlProject := urlVars["project"] - urlSub := urlVars["subscription"] - - // Grab context references - refBrk := gorillaContext.Get(r, "brk").(brokers.Broker) - refStr := gorillaContext.Get(r, "str").(stores.Store) - refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) - refRoles := gorillaContext.Get(r, "auth_roles").([]string) - refAuthResource := gorillaContext.Get(r, "auth_resource").(bool) - pushEnabled := gorillaContext.Get(r, "push_enabled").(bool) - - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - // Get the subscription - results, err := subscriptions.Find(projectUUID, "", urlSub, "", 0, refStr) - if err != nil { - err := APIErrGenericBackend() - respondErr(w, err) - return - } - - if results.Empty() { - err := APIErrorNotFound("Subscription") - respondErr(w, err) - return - } - - targetSub := results.Subscriptions[0] - fullTopic := targetSub.ProjectUUID + "." 
+ targetSub.Topic - retImm := true - max := 1 - - // if the subscription is push enabled but push enabled is false, don't allow push worker user to consume - if targetSub.PushCfg != (subscriptions.PushConfig{}) && !pushEnabled && auth.IsPushWorker(refRoles) { - err := APIErrorPushConflict() - respondErr(w, err) - return - } - - // if the subscription is push enabled, allow only push worker and service_admin users to pull from it - if targetSub.PushCfg != (subscriptions.PushConfig{}) && !auth.IsPushWorker(refRoles) && !auth.IsServiceAdmin(refRoles) { - err := APIErrorForbidden() - respondErr(w, err) - return - } - - // Check Authorization per subscription - // - if enabled in config - // - if user has only consumer role - if refAuthResource && auth.IsConsumer(refRoles) { - if auth.PerResource(projectUUID, "subscriptions", targetSub.Name, refUserUUID, refStr) == false { - err := APIErrorForbidden() - respondErr(w, err) - return - } - } - - // check if the subscription's topic exists - if !topics.HasTopic(projectUUID, targetSub.Topic, refStr) { - err := APIErrorPullNoTopic() - respondErr(w, err) - return - } - - // Read POST JSON body - body, err := ioutil.ReadAll(r.Body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // Parse pull options - pullInfo, err := subscriptions.GetPullOptionsJSON(body) - if err != nil { - err := APIErrorInvalidArgument("Pull Parameters") - respondErr(w, err) - log.Error(string(body[:])) - return - } - - if pullInfo.MaxMsg != "" { - max, err = strconv.Atoi(pullInfo.MaxMsg) - if err != nil { - max = 1 - } - } - - if pullInfo.RetImm == "false" { - retImm = false - } - - // Init Received Message List - recList := messages.RecList{} - - msgs, err := refBrk.Consume(r.Context(), fullTopic, targetSub.Offset, retImm, int64(max)) - if err != nil { - // If tracked offset is off - if err == brokers.ErrOffsetOff { - log.Debug("Will increment now...") - // Increment tracked offset to current min offset - 
targetSub.Offset = refBrk.GetMinOffset(fullTopic) - refStr.UpdateSubOffset(projectUUID, targetSub.Name, targetSub.Offset) - // Try again to consume - msgs, err = refBrk.Consume(r.Context(), fullTopic, targetSub.Offset, retImm, int64(max)) - // If still error respond and return - if err != nil { - log.Errorf("Couldn't consume messages for subscription %v, %v", targetSub.FullName, err.Error()) - err := APIErrGenericBackend() - respondErr(w, err) - return - } - } else { - log.Errorf("Couldn't consume messages for subscription %v, %v", targetSub.FullName, err.Error()) - err := APIErrGenericBackend() - respondErr(w, err) - return - } - } - var limit int - limit, err = strconv.Atoi(pullInfo.MaxMsg) - if err != nil { - limit = 0 - } - - ackPrefix := "projects/" + urlProject + "/subscriptions/" + urlSub + ":" - - for i, msg := range msgs { - if limit > 0 && i >= limit { - break // max messages left - } - curMsg, err := messages.LoadMsgJSON([]byte(msg)) - if err != nil { - err := APIErrGenericInternal("Message retrieved from broker network has invalid JSON Structure") - respondErr(w, err) - return - } - // calc the message id = message's kafka offset (read offst + msg position) - idOff := targetSub.Offset + int64(i) - curMsg.ID = strconv.FormatInt(idOff, 10) - curRec := messages.RecMsg{AckID: ackPrefix + curMsg.ID, Msg: curMsg} - recList.RecMsgs = append(recList.RecMsgs, curRec) - } - - // amount of messages consumed - msgCount := int64(len(msgs)) - - log.Debug(msgCount) - - // consumption time - consumeTime := time.Now().UTC() - - // increment subscription number of message metric - refStr.IncrementSubMsgNum(projectUUID, urlSub, msgCount) - refStr.IncrementSubBytes(projectUUID, urlSub, recList.TotalSize()) - refStr.UpdateSubLatestConsume(projectUUID, targetSub.Name, consumeTime) - - // count the rate of consumed messages per sec between the last two consume events - var dt float64 = 1 - // if its the first consume to the subscription - // skip the subtraction that computes 
the DT between the last two consume events - if !targetSub.LatestConsume.IsZero() { - dt = consumeTime.Sub(targetSub.LatestConsume).Seconds() - } - - refStr.UpdateSubConsumeRate(projectUUID, targetSub.Name, float64(msgCount)/dt) - - resJSON, err := recList.ExportJSON() - - if err != nil { - err := APIErrExportJSON() - respondErr(w, err) - return - } - - // Stamp time to UTC Z to seconds - zSec := "2006-01-02T15:04:05Z" - t := time.Now().UTC() - ts := t.Format(zSec) - refStr.UpdateSubPull(targetSub.ProjectUUID, targetSub.Name, int64(len(recList.RecMsgs))+targetSub.Offset, ts) - - output = []byte(resJSON) - respondOK(w, output) -} - -// HealthCheck returns an ok message to make sure the service is up and running -func HealthCheck(w http.ResponseWriter, r *http.Request) { - - var err error - var bytes []byte - - apsc := gorillaContext.Get(r, "apsc").(push.Client) - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - healthMsg := HealthStatus{ - Status: "ok", - } - - pwToken := gorillaContext.Get(r, "push_worker_token").(string) - pushEnabled := gorillaContext.Get(r, "push_enabled").(bool) - refStr := gorillaContext.Get(r, "str").(stores.Store) - - if pushEnabled { - _, err := auth.GetPushWorker(pwToken, refStr) - if err != nil { - healthMsg.Status = "warning" - } - - healthMsg.PushServers = []PushServerInfo{ - { - Endpoint: apsc.Target(), - Status: apsc.HealthCheck(context.TODO()).Result(), - }, - } - - } else { - healthMsg.PushFunctionality = "disabled" - } - - if bytes, err = json.MarshalIndent(healthMsg, "", " "); err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - respondOK(w, bytes) -} - -// SchemaCreate(POST) handles the creation of a new schema -func SchemaCreate(w http.ResponseWriter, r *http.Request) { - - // Add content type header to the response - contentType := 
"application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Get url path variables - urlVars := mux.Vars(r) - schemaName := urlVars["schema"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - schemaUUID := uuid.NewV4().String() - - schema := schemas.Schema{} - - err := json.NewDecoder(r.Body).Decode(&schema) - if err != nil { - err := APIErrorInvalidArgument("Schema") - respondErr(w, err) - return - } - - schema, err = schemas.Create(projectUUID, schemaUUID, schemaName, schema.Type, schema.RawSchema, refStr) - if err != nil { - if err.Error() == "exists" { - err := APIErrorConflict("Schema") - respondErr(w, err) - return - - } - - if err.Error() == "unsupported" { - err := APIErrorInvalidData(schemas.UnsupportedSchemaError) - respondErr(w, err) - return - - } - - err := APIErrorInvalidData(err.Error()) - respondErr(w, err) - return - } - - output, _ := json.MarshalIndent(schema, "", " ") - respondOK(w, output) -} - -// SchemaListOne(GET) retrieves information about the requested schema -func SchemaListOne(w http.ResponseWriter, r *http.Request) { - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Get url path variables - urlVars := mux.Vars(r) - schemaName := urlVars["schema"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - schemasList, err := schemas.Find(projectUUID, "", schemaName, refStr) - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - if schemasList.Empty() { - err := 
APIErrorNotFound("Schema") - respondErr(w, err) - return - } - - output, _ := json.MarshalIndent(schemasList.Schemas[0], "", " ") - respondOK(w, output) -} - -// SchemaLisAll(GET) retrieves all the schemas under the given project -func SchemaListAll(w http.ResponseWriter, r *http.Request) { - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - schemasList, err := schemas.Find(projectUUID, "", "", refStr) - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - output, _ := json.MarshalIndent(schemasList, "", " ") - respondOK(w, output) -} - -// SchemaUpdate(PUT) updates the given schema -func SchemaUpdate(w http.ResponseWriter, r *http.Request) { - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Get url path variables - urlVars := mux.Vars(r) - schemaName := urlVars["schema"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - schemasList, err := schemas.Find(projectUUID, "", schemaName, refStr) - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - if schemasList.Empty() { - err := APIErrorNotFound("Schema") - respondErr(w, err) - return - } - - updatedSchema := schemas.Schema{} - - err = json.NewDecoder(r.Body).Decode(&updatedSchema) - if err != nil { - err := APIErrorInvalidArgument("Schema") - respondErr(w, err) - return - } - - if 
updatedSchema.FullName != "" { - _, schemaName, err := schemas.ExtractSchema(updatedSchema.FullName) - if err != nil { - err := APIErrorInvalidData(err.Error()) - respondErr(w, err) - return - } - updatedSchema.Name = schemaName - } - - schema, err := schemas.Update(schemasList.Schemas[0], updatedSchema.Name, updatedSchema.Type, updatedSchema.RawSchema, refStr) - if err != nil { - if err.Error() == "exists" { - err := APIErrorConflict("Schema") - respondErr(w, err) - return - - } - - if err.Error() == "unsupported" { - err := APIErrorInvalidData(schemas.UnsupportedSchemaError) - respondErr(w, err) - return - - } - - err := APIErrorInvalidData(err.Error()) - respondErr(w, err) - return - } - - output, _ := json.MarshalIndent(schema, "", " ") - respondOK(w, output) -} - -func SchemaDelete(w http.ResponseWriter, r *http.Request) { - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Get url path variables - urlVars := mux.Vars(r) - schemaName := urlVars["schema"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - - schemasList, err := schemas.Find(projectUUID, "", schemaName, refStr) - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - if schemasList.Empty() { - err := APIErrorNotFound("Schema") - respondErr(w, err) - return - } - - err = schemas.Delete(schemasList.Schemas[0].UUID, refStr) - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - respondOK(w, nil) -} - -// SchemaValidateMessage(POST) validates the given message against the schema -func SchemaValidateMessage(w http.ResponseWriter, r *http.Request) { - - // Add content type header to the response - contentType := 
"application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - // Get url path variables - urlVars := mux.Vars(r) - schemaName := urlVars["schema"] - - // Grab context references - refStr := gorillaContext.Get(r, "str").(stores.Store) - - // Get project UUID First to use as reference - projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) - schemasList, err := schemas.Find(projectUUID, "", schemaName, refStr) - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - - if schemasList.Empty() { - err := APIErrorNotFound("Schema") - respondErr(w, err) - return - } - - buf := bytes.Buffer{} - _, err = buf.ReadFrom(r.Body) - if err != nil { - err := APIErrorInvalidData(err.Error()) - respondErr(w, err) - return - } - - msgList := messages.MsgList{} - - switch schemasList.Schemas[0].Type { - case schemas.JSON: - msg := messages.Message{ - Data: base64.StdEncoding.EncodeToString(buf.Bytes()), - } - - msgList.Msgs = append(msgList.Msgs, msg) - - case schemas.AVRO: - - body := map[string]string{} - err := json.Unmarshal(buf.Bytes(), &body) - if err != nil { - err := APIErrorInvalidRequestBody() - respondErr(w, err) - return - } - - // check to find the payload field - if val, ok := body["data"]; ok { - - msg := messages.Message{ - Data: val, - } - - msgList.Msgs = append(msgList.Msgs, msg) - - } else { - - err := APIErrorInvalidArgument("Schema Payload") - respondErr(w, err) - return - } - } - - err = schemas.ValidateMessages(schemasList.Schemas[0], msgList) - if err != nil { - if err.Error() == "500" { - err := APIErrGenericInternal(schemas.GenericError) - respondErr(w, err) - return - } else { - err := APIErrorInvalidData(err.Error()) - respondErr(w, err) - return - } - } - - res, _ := json.MarshalIndent(map[string]string{"message": "Message validated successfully"}, "", " ") - - respondOK(w, res) -} - -// ListVersion displays version information about 
the service -func ListVersion(w http.ResponseWriter, r *http.Request) { - - // Add content type header to the response - contentType := "application/json" - charset := "utf-8" - w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) - - v := version.Model{ - Release: version.Release, - Commit: version.Commit, - BuildTime: version.BuildTime, - GO: version.GO, - Compiler: version.Compiler, - OS: version.OS, - Arch: version.Arch, - } - - output, err := json.MarshalIndent(v, "", " ") - if err != nil { - err := APIErrGenericInternal(err.Error()) - respondErr(w, err) - return - } - respondOK(w, output) - -} - -// Respond utility functions -/////////////////////////////// - -// respondOK is used to finalize response writer with proper code and output -func respondOK(w http.ResponseWriter, output []byte) { - w.WriteHeader(http.StatusOK) - w.Write(output) -} - -// respondErr is used to finalize response writer with proper error codes and error output -func respondErr(w http.ResponseWriter, apiErr APIErrorRoot) { - log.Error(apiErr.Body.Code, "\t", apiErr.Body.Message) - // set the response code - w.WriteHeader(apiErr.Body.Code) - // Output API Erorr object to JSON - output, _ := json.MarshalIndent(apiErr, "", " ") - w.Write(output) -} - -type HealthStatus struct { - Status string `json:"status,omitempty"` - PushServers []PushServerInfo `json:"push_servers,omitempty"` - PushFunctionality string `json:"push_functionality,omitempty"` -} - -type PushServerInfo struct { - Endpoint string `json:"endpoint"` - Status string `json:"status"` -} - -// APIErrorRoot holds the root json object of an error response -type APIErrorRoot struct { - Body APIErrorBody `json:"error"` -} - -// APIErrorBody represents the inner json body of the error response -type APIErrorBody struct { - Code int `json:"code"` - Message string `json:"message"` - ErrList []APIError `json:"errors,omitempty"` - Status string `json:"status"` -} - -// APIError represents array items for 
error list array -type APIError struct { - Message string `json:"message"` - Domain string `json:"domain"` - Reason string `json:"reason"` -} - -// IsValidHTTPS checks if a url string is valid https url -func isValidHTTPS(urlStr string) bool { - u, err := url.ParseRequestURI(urlStr) - if err != nil { - return false - } - // If a valid url is in form without slashes after scheme consider it invalid. - // If a valid url doesn't have https as a scheme consider it invalid - if u.Host == "" || u.Scheme != "https" { - return false - } - - return true -} - -// api err to be used when dealing with an invalid request body -var APIErrorInvalidRequestBody = func() APIErrorRoot { - apiErrBody := APIErrorBody{Code: http.StatusBadRequest, Message: "Invalid Request Body", Status: "BAD_REQUEST"} - return APIErrorRoot{Body: apiErrBody} -} - -// api err to be used when a name provided through the url parameters is not valid -var APIErrorInvalidName = func(key string) APIErrorRoot { - apiErrBody := APIErrorBody{Code: http.StatusBadRequest, Message: fmt.Sprintf("Invalid %v name", key), Status: "INVALID_ARGUMENT"} - return APIErrorRoot{Body: apiErrBody} -} - -// api err to be used when data provided is invalid -var APIErrorInvalidData = func(msg string) APIErrorRoot { - apiErrBody := APIErrorBody{Code: http.StatusBadRequest, Message: msg, Status: "INVALID_ARGUMENT"} - return APIErrorRoot{Body: apiErrBody} -} - -// api err to be used when argument's provided are invalid according to the resource -var APIErrorInvalidArgument = func(resource string) APIErrorRoot { - apiErrBody := APIErrorBody{Code: http.StatusBadRequest, Message: fmt.Sprintf("Invalid %v Arguments", resource), Status: "INVALID_ARGUMENT"} - return APIErrorRoot{Body: apiErrBody} -} - -// api err to be used when a user is unauthorized -var APIErrorUnauthorized = func() APIErrorRoot { - apiErrBody := APIErrorBody{Code: http.StatusUnauthorized, Message: "Unauthorized", Status: "UNAUTHORIZED"} - return APIErrorRoot{Body: 
apiErrBody} -} - -// api err to be used when access to a resource is forbidden for the request user -var APIErrorForbidden = func() APIErrorRoot { - apiErrBody := APIErrorBody{Code: http.StatusForbidden, Message: "Access to this resource is forbidden", Status: "FORBIDDEN"} - return APIErrorRoot{Body: apiErrBody} -} - -// api err to be used when access to a resource is forbidden for the request user -var APIErrorForbiddenWithMsg = func(msg string) APIErrorRoot { - apiErrBody := APIErrorBody{Code: http.StatusForbidden, Message: fmt.Sprintf("Access to this resource is forbidden. %v", msg), Status: "FORBIDDEN"} - return APIErrorRoot{Body: apiErrBody} -} - -// api err for dealing with absent resources -var APIErrorNotFound = func(resource string) APIErrorRoot { - apiErrBody := APIErrorBody{Code: http.StatusNotFound, Message: fmt.Sprintf("%v doesn't exist", resource), Status: "NOT_FOUND"} - return APIErrorRoot{Body: apiErrBody} -} - -// api err for dealing with timeouts -var APIErrorTimeout = func(msg string) APIErrorRoot { - apiErrBody := APIErrorBody{Code: http.StatusRequestTimeout, Message: msg, Status: "TIMEOUT"} - return APIErrorRoot{Body: apiErrBody} -} - -// api err for dealing with already existing resources -var APIErrorConflict = func(resource string) APIErrorRoot { - apiErrBody := APIErrorBody{Code: http.StatusConflict, Message: fmt.Sprintf("%v already exists", resource), Status: "ALREADY_EXISTS"} - return APIErrorRoot{Body: apiErrBody} -} - -// api error to be used when push enabled false -var APIErrorPushConflict = func() APIErrorRoot { - - apiErrBody := APIErrorBody{ - Code: http.StatusConflict, - Message: "Push functionality is currently disabled", - Status: "CONFLICT", - } - - return APIErrorRoot{ - Body: apiErrBody, - } -} - -// api error to be used to format generic conflict errors -var APIErrorGenericConflict = func(msg string) APIErrorRoot { - - apiErrBody := APIErrorBody{ - Code: http.StatusConflict, - Message: msg, - Status: "CONFLICT", - } - - 
return APIErrorRoot{ - Body: apiErrBody, - } -} - -// api error to be used when push enabled false -var APIErrorPullNoTopic = func() APIErrorRoot { - - apiErrBody := APIErrorBody{ - Code: http.StatusConflict, - Message: "Subscription's topic doesn't exist", - Status: "CONFLICT", - } - - return APIErrorRoot{ - Body: apiErrBody, - } -} - -// api err for dealing with too large messages -var APIErrTooLargeMessage = func(resource string) APIErrorRoot { - apiErrBody := APIErrorBody{Code: http.StatusRequestEntityTooLarge, Message: "Message size is too large", Status: "INVALID_ARGUMENT"} - return APIErrorRoot{Body: apiErrBody} -} - -// api err for dealing with generic internal errors -var APIErrGenericInternal = func(msg string) APIErrorRoot { - apiErrBody := APIErrorBody{Code: http.StatusInternalServerError, Message: msg, Status: "INTERNAL_SERVER_ERROR"} - return APIErrorRoot{Body: apiErrBody} -} - -// api err for dealing with generic internal errors -var APIErrPushVerification = func(msg string) APIErrorRoot { - apiErrBody := APIErrorBody{ - Code: http.StatusUnauthorized, - Message: fmt.Sprintf("Endpoint verification failed.%v", msg), - Status: "UNAUTHORIZED", - } - return APIErrorRoot{Body: apiErrBody} -} - -// api err for dealing with internal errors when marshaling json to struct -var APIErrExportJSON = func() APIErrorRoot { - apiErrBody := APIErrorBody{Code: http.StatusInternalServerError, Message: "Error exporting data to JSON", Status: "INTERNAL_SERVER_ERROR"} - return APIErrorRoot{Body: apiErrBody} -} - -// api err for dealing with internal errors when querying the datastore -var APIErrQueryDatastore = func() APIErrorRoot { - apiErrBody := APIErrorBody{Code: http.StatusInternalServerError, Message: "Internal error while querying datastore", Status: "INTERNAL_SERVER_ERROR"} - return APIErrorRoot{Body: apiErrBody} -} - -// api err for dealing with internal errors related to acknowledgement -var APIErrHandlingAcknowledgement = func() APIErrorRoot { - apiErrBody := 
APIErrorBody{Code: http.StatusInternalServerError, Message: "Error handling acknowledgement", Status: "INTERNAL_SERVER_ERROR"} - return APIErrorRoot{Body: apiErrBody} -} - -// api err for dealing with generic backend errors -var APIErrGenericBackend = func() APIErrorRoot { - apiErrBody := APIErrorBody{Code: http.StatusInternalServerError, Message: "Backend Error", Status: "INTERNAL_SERVER_ERROR"} - return APIErrorRoot{Body: apiErrBody} -} - -// api error to be used when push enabled true but push worker was not able to be retrieved -var APIErrInternalPush = func() APIErrorRoot { - - apiErrBody := APIErrorBody{ - Code: http.StatusInternalServerError, - Message: "Push functionality is currently unavailable", - Status: "INTERNAL_SERVER_ERROR", - } - - return APIErrorRoot{ - Body: apiErrBody, - } -} diff --git a/handlers/errors.go b/handlers/errors.go new file mode 100644 index 00000000..95723dd7 --- /dev/null +++ b/handlers/errors.go @@ -0,0 +1,312 @@ +package handlers + +import ( + "fmt" + "net/http" +) + +// APIErrorRoot holds the root json object of an error response +type APIErrorRoot struct { + Body APIErrorBody `json:"error"` +} + +// APIErrorBody represents the inner json body of the error response +type APIErrorBody struct { + Code int `json:"code"` + Message string `json:"message"` + ErrList []APIError `json:"errors,omitempty"` + Status string `json:"status"` +} + +// APIError represents array items for error list array +type APIError struct { + Message string `json:"message"` + Domain string `json:"domain"` + Reason string `json:"reason"` +} + +// api err to be used when dealing with an invalid request body +var APIErrorInvalidRequestBody = func() APIErrorRoot { + + apiErrBody := APIErrorBody{ + Code: http.StatusBadRequest, + Message: "Invalid Request Body", + Status: "BAD_REQUEST", + } + + return APIErrorRoot{ + Body: apiErrBody, + } +} + +// api err to be used when a name provided through the url parameters is not valid +var APIErrorInvalidName = func(key 
string) APIErrorRoot { + + apiErrBody := APIErrorBody{ + Code: http.StatusBadRequest, + Message: fmt.Sprintf("Invalid %v name", key), + Status: "INVALID_ARGUMENT", + } + + return APIErrorRoot{ + Body: apiErrBody, + } +} + +// api err to be used when data provided is invalid +var APIErrorInvalidData = func(msg string) APIErrorRoot { + + apiErrBody := APIErrorBody{ + Code: http.StatusBadRequest, + Message: msg, + Status: "INVALID_ARGUMENT", + } + + return APIErrorRoot{ + Body: apiErrBody, + } +} + +// api err to be used when argument's provided are invalid according to the resource +var APIErrorInvalidArgument = func(resource string) APIErrorRoot { + + apiErrBody := APIErrorBody{ + Code: http.StatusBadRequest, + Message: fmt.Sprintf("Invalid %v Arguments", resource), + Status: "INVALID_ARGUMENT", + } + + return APIErrorRoot{ + Body: apiErrBody, + } +} + +// api err to be used when a user is unauthorized +var APIErrorUnauthorized = func() APIErrorRoot { + + apiErrBody := APIErrorBody{ + Code: http.StatusUnauthorized, + Message: "Unauthorized", + Status: "UNAUTHORIZED", + } + + return APIErrorRoot{ + Body: apiErrBody, + } +} + +// api err to be used when access to a resource is forbidden for the request user +var APIErrorForbidden = func() APIErrorRoot { + + apiErrBody := APIErrorBody{ + Code: http.StatusForbidden, + Message: "Access to this resource is forbidden", + Status: "FORBIDDEN", + } + + return APIErrorRoot{ + Body: apiErrBody, + } +} + +// api err to be used when access to a resource is forbidden for the request user +var APIErrorForbiddenWithMsg = func(msg string) APIErrorRoot { + apiErrBody := APIErrorBody{Code: http.StatusForbidden, Message: fmt.Sprintf("Access to this resource is forbidden. 
%v", msg), Status: "FORBIDDEN"}
	return APIErrorRoot{Body: apiErrBody}
}

// APIErrorNotFound is the api err for dealing with absent resources.
var APIErrorNotFound = func(resource string) APIErrorRoot {

	apiErrBody := APIErrorBody{
		Code:    http.StatusNotFound,
		Message: fmt.Sprintf("%v doesn't exist", resource),
		Status:  "NOT_FOUND",
	}

	return APIErrorRoot{
		Body: apiErrBody,
	}
}

// APIErrorTimeout is the api err for dealing with timeouts.
var APIErrorTimeout = func(msg string) APIErrorRoot {

	apiErrBody := APIErrorBody{
		Code:    http.StatusRequestTimeout,
		Message: msg,
		Status:  "TIMEOUT",
	}

	return APIErrorRoot{
		Body: apiErrBody,
	}
}

// APIErrorConflict is the api err for dealing with already existing resources.
var APIErrorConflict = func(resource string) APIErrorRoot {

	apiErrBody := APIErrorBody{
		Code:    http.StatusConflict,
		Message: fmt.Sprintf("%v already exists", resource),
		Status:  "ALREADY_EXISTS",
	}

	return APIErrorRoot{
		Body: apiErrBody,
	}
}

// APIErrorPushConflict is the api error to be used when the push functionality is disabled.
var APIErrorPushConflict = func() APIErrorRoot {

	apiErrBody := APIErrorBody{
		Code:    http.StatusConflict,
		Message: "Push functionality is currently disabled",
		Status:  "CONFLICT",
	}

	return APIErrorRoot{
		Body: apiErrBody,
	}
}

// APIErrorGenericConflict is the api error used to format generic conflict errors.
var APIErrorGenericConflict = func(msg string) APIErrorRoot {

	apiErrBody := APIErrorBody{
		Code:    http.StatusConflict,
		Message: msg,
		Status:  "CONFLICT",
	}

	return APIErrorRoot{
		Body: apiErrBody,
	}
}

// APIErrorPullNoTopic is the api error returned when a subscription's
// associated topic no longer exists.
var APIErrorPullNoTopic = func() APIErrorRoot {

	apiErrBody := APIErrorBody{
		Code:    http.StatusConflict,
		Message: "Subscription's topic doesn't exist",
		Status:  "CONFLICT",
	}

	return APIErrorRoot{
		Body: apiErrBody,
	}
}

// APIErrTooLargeMessage is the api err for dealing with too large messages.
// NOTE(review): the resource parameter is currently unused; it is kept to
// preserve the existing call signature.
var APIErrTooLargeMessage = func(resource string) APIErrorRoot {

	apiErrBody := APIErrorBody{
		Code:    http.StatusRequestEntityTooLarge,
		Message: "Message size is too large",
		Status:  "INVALID_ARGUMENT",
	}

	return APIErrorRoot{
		Body: apiErrBody,
	}
}

// APIErrGenericInternal is the api err for dealing with generic internal errors.
var APIErrGenericInternal = func(msg string) APIErrorRoot {

	apiErrBody := APIErrorBody{
		Code:    http.StatusInternalServerError,
		Message: msg,
		Status:  "INTERNAL_SERVER_ERROR",
	}

	return APIErrorRoot{
		Body: apiErrBody,
	}
}

// APIErrPushVerification is the api err for failed push endpoint verification.
var APIErrPushVerification = func(msg string) APIErrorRoot {

	apiErrBody := APIErrorBody{
		// NOTE(review): format string has no separator after the period,
		// so msg is appended directly ("failed.<msg>"); kept as-is since
		// clients may match on the exact message.
		Code:    http.StatusUnauthorized,
		Message: fmt.Sprintf("Endpoint verification failed.%v", msg),
		Status:  "UNAUTHORIZED",
	}

	return APIErrorRoot{
		Body: apiErrBody,
	}
}

// APIErrExportJSON is the api err for internal errors when marshaling data to JSON.
var APIErrExportJSON = func() APIErrorRoot {

	apiErrBody := APIErrorBody{
		Code:    http.StatusInternalServerError,
		Message: "Error exporting data to JSON",
		Status:  "INTERNAL_SERVER_ERROR",
	}

	return APIErrorRoot{
		Body: apiErrBody,
	}
}

// APIErrQueryDatastore is the api err for internal errors when querying the datastore.
var APIErrQueryDatastore = func() APIErrorRoot {

	apiErrBody := APIErrorBody{
		Code:    http.StatusInternalServerError,
		Message: "Internal error while querying datastore",
		Status:  "INTERNAL_SERVER_ERROR",
	}

	return APIErrorRoot{
		Body: apiErrBody,
	}
}

// APIErrHandlingAcknowledgement is the api err for internal errors related to acknowledgement.
var APIErrHandlingAcknowledgement = func() APIErrorRoot {

	apiErrBody := APIErrorBody{
		Code:    http.StatusInternalServerError,
		Message: "Error handling acknowledgement",
		Status:  "INTERNAL_SERVER_ERROR",
	}

	return APIErrorRoot{
		Body: apiErrBody,
	}
}

// APIErrGenericBackend is the api err for dealing with generic backend errors.
var APIErrGenericBackend = func() APIErrorRoot {

	apiErrBody := APIErrorBody{
		Code:    http.StatusInternalServerError,
		Message: "Backend Error",
+ Status: "INTERNAL_SERVER_ERROR", + } + + return APIErrorRoot{ + Body: apiErrBody, + } +} + +// api error to be used when push enabled true but push worker was not able to be retrieved +var APIErrInternalPush = func() APIErrorRoot { + + apiErrBody := APIErrorBody{ + Code: http.StatusInternalServerError, + Message: "Push functionality is currently unavailable", + Status: "INTERNAL_SERVER_ERROR", + } + + return APIErrorRoot{ + Body: apiErrBody, + } +} diff --git a/handlers/handlers.go b/handlers/handlers.go new file mode 100644 index 00000000..aaac1407 --- /dev/null +++ b/handlers/handlers.go @@ -0,0 +1,367 @@ +package handlers + +import ( + "context" + "encoding/json" + "fmt" + "github.com/ARGOeu/argo-messaging/auth" + "github.com/ARGOeu/argo-messaging/brokers" + "github.com/ARGOeu/argo-messaging/config" + "github.com/ARGOeu/argo-messaging/projects" + oldPush "github.com/ARGOeu/argo-messaging/push" + push "github.com/ARGOeu/argo-messaging/push/grpc/client" + "github.com/ARGOeu/argo-messaging/stores" + "github.com/ARGOeu/argo-messaging/validation" + "github.com/ARGOeu/argo-messaging/version" + gorillaContext "github.com/gorilla/context" + "github.com/gorilla/mux" + log "github.com/sirupsen/logrus" + "net/http" + "sort" + "time" +) + +// WrapValidate handles validation +func WrapValidate(hfn http.HandlerFunc) http.HandlerFunc { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + + urlVars := mux.Vars(r) + + // sort keys + keys := []string(nil) + for key := range urlVars { + keys = append(keys, key) + } + sort.Strings(keys) + + // Iterate alphabetically + for _, key := range keys { + if validation.ValidName(urlVars[key]) == false { + err := APIErrorInvalidName(key) + respondErr(w, err) + return + } + } + hfn.ServeHTTP(w, r) + + }) +} + +// WrapMockAuthConfig handle wrapper is used in tests were some auth context is needed +func WrapMockAuthConfig(hfn http.HandlerFunc, cfg *config.APICfg, brk brokers.Broker, str stores.Store, mgr 
*oldPush.Manager, c push.Client, roles ...string) http.HandlerFunc { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + + urlVars := mux.Vars(r) + + userRoles := []string{"publisher", "consumer"} + if len(roles) > 0 { + userRoles = roles + } + + nStr := str.Clone() + defer nStr.Close() + + projectUUID := projects.GetUUIDByName(urlVars["project"], nStr) + gorillaContext.Set(r, "auth_project_uuid", projectUUID) + gorillaContext.Set(r, "brk", brk) + gorillaContext.Set(r, "str", nStr) + gorillaContext.Set(r, "mgr", mgr) + gorillaContext.Set(r, "apsc", c) + gorillaContext.Set(r, "auth_resource", cfg.ResAuth) + gorillaContext.Set(r, "auth_user", "UserA") + gorillaContext.Set(r, "auth_user_uuid", "uuid1") + gorillaContext.Set(r, "auth_roles", userRoles) + gorillaContext.Set(r, "push_worker_token", cfg.PushWorkerToken) + gorillaContext.Set(r, "push_enabled", cfg.PushEnabled) + hfn.ServeHTTP(w, r) + + }) +} + +// WrapConfig handle wrapper to retrieve kafka configuration +func WrapConfig(hfn http.HandlerFunc, cfg *config.APICfg, brk brokers.Broker, str stores.Store, mgr *oldPush.Manager, c push.Client) http.HandlerFunc { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + + nStr := str.Clone() + defer nStr.Close() + gorillaContext.Set(r, "brk", brk) + gorillaContext.Set(r, "str", nStr) + gorillaContext.Set(r, "mgr", mgr) + gorillaContext.Set(r, "apsc", c) + gorillaContext.Set(r, "auth_resource", cfg.ResAuth) + gorillaContext.Set(r, "auth_service_token", cfg.ServiceToken) + gorillaContext.Set(r, "push_worker_token", cfg.PushWorkerToken) + gorillaContext.Set(r, "push_enabled", cfg.PushEnabled) + hfn.ServeHTTP(w, r) + + }) +} + +// WrapLog handle wrapper to apply Logging +func WrapLog(hfn http.Handler, name string) http.HandlerFunc { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + + start := time.Now() + + hfn.ServeHTTP(w, r) + + log.WithFields( + log.Fields{ + "type": "request_log", + "method": r.Method, 
+ "path": r.URL.Path, + "action": name, + "requester": gorillaContext.Get(r, "auth_user_uuid"), + "processing_time": time.Since(start).String(), + }, + ).Info("") + }) +} + +// WrapAuthenticate handle wrapper to apply authentication +func WrapAuthenticate(hfn http.Handler, extractToken RequestTokenExtractStrategy) http.HandlerFunc { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + + urlVars := mux.Vars(r) + + apiKey := extractToken(r) + + // if the url parameter 'key' is empty or absent, end the request with an unauthorized response + if apiKey == "" { + err := APIErrorUnauthorized() + respondErr(w, err) + return + } + + refStr := gorillaContext.Get(r, "str").(stores.Store) + serviceToken := gorillaContext.Get(r, "auth_service_token").(string) + + projectName := urlVars["project"] + projectUUID := projects.GetUUIDByName(urlVars["project"], refStr) + + // In all cases instead of project create + if "projects:create" != mux.CurrentRoute(r).GetName() { + // Check if given a project name the project wasn't found + if projectName != "" && projectUUID == "" { + apiErr := APIErrorNotFound("project") + respondErr(w, apiErr) + return + } + } + + // Check first if service token is used + if serviceToken != "" && serviceToken == apiKey { + gorillaContext.Set(r, "auth_roles", []string{"service_admin"}) + gorillaContext.Set(r, "auth_user", "") + gorillaContext.Set(r, "auth_user_uuid", "") + gorillaContext.Set(r, "auth_project_uuid", projectUUID) + hfn.ServeHTTP(w, r) + return + } + + roles, user := auth.Authenticate(projectUUID, apiKey, refStr) + + if len(roles) > 0 { + userUUID := auth.GetUUIDByName(user, refStr) + gorillaContext.Set(r, "auth_roles", roles) + gorillaContext.Set(r, "auth_user", user) + gorillaContext.Set(r, "auth_user_uuid", userUUID) + gorillaContext.Set(r, "auth_project_uuid", projectUUID) + hfn.ServeHTTP(w, r) + } else { + err := APIErrorUnauthorized() + respondErr(w, err) + } + + }) +} + +// WrapAuthorize handle wrapper to apply 
authorization +func WrapAuthorize(hfn http.Handler, routeName string, extractToken RequestTokenExtractStrategy) http.HandlerFunc { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + + refStr := gorillaContext.Get(r, "str").(stores.Store) + refRoles := gorillaContext.Get(r, "auth_roles").([]string) + serviceToken := gorillaContext.Get(r, "auth_service_token").(string) + apiKey := extractToken(r) + + // Check first if service token is used + if serviceToken != "" && serviceToken == apiKey { + hfn.ServeHTTP(w, r) + return + } + + if auth.Authorize(routeName, refRoles, refStr) { + hfn.ServeHTTP(w, r) + } else { + err := APIErrorForbidden() + respondErr(w, err) + } + }) +} + +// HealthCheck returns an ok message to make sure the service is up and running +func HealthCheck(w http.ResponseWriter, r *http.Request) { + + var err error + var bytes []byte + + apsc := gorillaContext.Get(r, "apsc").(push.Client) + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + healthMsg := HealthStatus{ + Status: "ok", + } + + detailedStatus := false + + pwToken := gorillaContext.Get(r, "push_worker_token").(string) + pushEnabled := gorillaContext.Get(r, "push_enabled").(bool) + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // check for the right roles when accessing the details part of the api call + if r.URL.Query().Get("details") == "true" { + + user, _ := auth.GetUserByToken(r.URL.Query().Get("key"), refStr) + + // if the user has a name, the token is valid + if user.Name == "" { + respondErr(w, APIErrorForbidden()) + return + } + + if !auth.IsAdminViewer(user.ServiceRoles) && !auth.IsServiceAdmin(user.ServiceRoles) { + respondErr(w, APIErrorUnauthorized()) + return + } + + // set uuid for logging + gorillaContext.Set(r, "auth_user_uuid", user.UUID) + + detailedStatus = true + } + + if pushEnabled { + _, err := 
auth.GetPushWorker(pwToken, refStr) + if err != nil { + healthMsg.Status = "warning" + } + + healthMsg.PushServers = []PushServerInfo{ + { + Endpoint: apsc.Target(), + Status: apsc.HealthCheck(context.TODO()).Result(detailedStatus), + }, + } + + } else { + healthMsg.PushFunctionality = "disabled" + } + + if bytes, err = json.MarshalIndent(healthMsg, "", " "); err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + respondOK(w, bytes) +} + +// ListVersion displays version information about the service +func ListVersion(w http.ResponseWriter, r *http.Request) { + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + v := version.Model{ + BuildTime: version.BuildTime, + GO: version.GO, + Compiler: version.Compiler, + OS: version.OS, + Arch: version.Arch, + } + + output, err := json.MarshalIndent(v, "", " ") + if err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + respondOK(w, output) +} + +// respondOK is used to finalize response writer with proper code and output +func respondOK(w http.ResponseWriter, output []byte) { + w.WriteHeader(http.StatusOK) + w.Write(output) +} + +// respondErr is used to finalize response writer with proper error codes and error output +func respondErr(w http.ResponseWriter, apiErr APIErrorRoot) { + log.Error(apiErr.Body.Code, "\t", apiErr.Body.Message) + // set the response code + w.WriteHeader(apiErr.Body.Code) + // Output API Erorr object to JSON + output, _ := json.MarshalIndent(apiErr, "", " ") + w.Write(output) +} + +// A function type that refers to all the functions that can extract an api access token from the request +type RequestTokenExtractStrategy func(r *http.Request) string + +// UrlKeyExtract extracts the api access token from the url parameter key +func UrlKeyExtract(r *http.Request) string { + return 
r.URL.Query().Get("key") +} + +// HeaderKeyExtract extracts the api access token from the url header x-api-key +func HeaderKeyExtract(r *http.Request) string { + return r.Header.Get("x-api-key") +} + +// HeaderUrlKeyExtract tries to extract the api access token first from the x-api-header +// and then it falls back to the url parameter +func HeaderUrlKeyExtract(r *http.Request) string { + + // first try the header x-api-key + key := r.Header.Get("x-api-key") + + // if the header is empty, fall back to the url parameter key + if key == "" { + key = r.URL.Query().Get("key") + } + + return key +} + +// GetRequestTokenExtractStrategy determines which api token extraction strategy +// should take place based on the provided argument +func GetRequestTokenExtractStrategy(authOpt config.AuthOption) RequestTokenExtractStrategy { + switch authOpt { + case config.HeaderKey: + return HeaderKeyExtract + case config.UrlKey: + return UrlKeyExtract + case config.URLKeyAndHeaderKey: + return HeaderUrlKeyExtract + } + return HeaderUrlKeyExtract +} + +type HealthStatus struct { + Status string `json:"status,omitempty"` + PushServers []PushServerInfo `json:"push_servers,omitempty"` + PushFunctionality string `json:"push_functionality,omitempty"` +} + +type PushServerInfo struct { + Endpoint string `json:"endpoint"` + Status string `json:"status"` +} diff --git a/handlers/handlers_test.go b/handlers/handlers_test.go new file mode 100644 index 00000000..8d55faf5 --- /dev/null +++ b/handlers/handlers_test.go @@ -0,0 +1,253 @@ +package handlers + +import ( + "fmt" + "github.com/ARGOeu/argo-messaging/version" + log "github.com/sirupsen/logrus" + "io/ioutil" + "net/http" + "net/http/httptest" + "net/url" + "testing" + + "github.com/ARGOeu/argo-messaging/brokers" + "github.com/ARGOeu/argo-messaging/config" + oldPush "github.com/ARGOeu/argo-messaging/push" + push "github.com/ARGOeu/argo-messaging/push/grpc/client" + "github.com/ARGOeu/argo-messaging/stores" + "github.com/gorilla/mux" + 
"github.com/stretchr/testify/suite" +) + +type HandlerTestSuite struct { + suite.Suite + cfgStr string +} + +func (suite *HandlerTestSuite) SetupTest() { + suite.cfgStr = `{ + "bind_ip":"", + "port":8080, + "zookeeper_hosts":["localhost"], + "kafka_znode":"", + "store_host":"localhost", + "store_db":"argo_msg", + "certificate":"/etc/pki/tls/certs/localhost.crt", + "certificate_key":"/etc/pki/tls/private/localhost.key", + "per_resource_auth":"true", + "push_enabled": "true", + "push_worker_token": "push_token" + }` +} + +func (suite *HandlerTestSuite) TestHealthCheck() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/status", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "status": "ok", + "push_servers": [ + { + "endpoint": "localhost:5555", + "status": "SERVING" + } + ] +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/status", WrapMockAuthConfig(HealthCheck, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + + suite.Equal(expResp, w.Body.String()) +} + +func (suite *HandlerTestSuite) TestHealthCheckDetails() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/status?details=true&key=admin-viewer-token", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "status": "ok", + "push_servers": [ + { + "endpoint": "localhost:5555", + "status": "SERVING" + } + ] +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + str.UserList = append(str.UserList, stores.QUser{ + 
UUID: "admin-viewer-id", + Name: "admin-viewer", + Token: "admin-viewer-token", + ServiceRoles: []string{"admin_viewer"}, + }) + + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/status", WrapMockAuthConfig(HealthCheck, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + + suite.Equal(expResp, w.Body.String()) +} + +func (suite *HandlerTestSuite) TestHealthCheckPushDisabled() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/status", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "status": "ok", + "push_functionality": "disabled" +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = false + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/status", WrapMockAuthConfig(HealthCheck, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *HandlerTestSuite) TestHealthCheckPushWorkerMissing() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/status", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "status": "warning", + "push_servers": [ + { + "endpoint": "localhost:5555", + "status": "SERVING" + } + ] +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + // add a wrong push worker token + cfgKafka.PushWorkerToken = "missing" + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/status", 
WrapMockAuthConfig(HealthCheck, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *HandlerTestSuite) TestGetRequestTokenExtractStrategy() { + + // test the key extract strategy + keyStrategy := GetRequestTokenExtractStrategy(config.UrlKey) + u1, _ := url.Parse("https://host.com/v1/projects?key=tok3n") + r1 := &http.Request{ + URL: u1, + } + suite.Equal("tok3n", keyStrategy(r1)) + + // test the header extract strategy + h1 := http.Header{} + h1.Add("x-api-key", "tok3n") + u2, _ := url.Parse("https://host.com/v1/projects") + r2 := &http.Request{ + URL: u2, + Header: h1, + } + headerStrategy := GetRequestTokenExtractStrategy(config.HeaderKey) + suite.Equal("tok3n", headerStrategy(r2)) + + // test the key and header strategy when there is a x-api-key header present + h2 := http.Header{} + h2.Add("x-api-key", "tok3n-h") + u3, _ := url.Parse("https://host.com/v1/projects?key=tok3n-url") + r3 := &http.Request{ + URL: u3, + Header: h2, + } + bothStrategy := GetRequestTokenExtractStrategy(config.URLKeyAndHeaderKey) + suite.Equal("tok3n-h", bothStrategy(r3)) + + // test the key and header strategy when there is no a x-api-key header present but there is a key url value + r3.Header = http.Header{} + bothStrategy2 := GetRequestTokenExtractStrategy(0) + suite.Equal("tok3n-url", bothStrategy2(r3)) +} + +func (suite *HandlerTestSuite) TestListVersion() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/version", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "build_time": "%v", + "golang": "%v", + "compiler": "%v", + "os": "%v", + "architecture": "%v" +}` + expResp = fmt.Sprintf(expResp, version.BuildTime, version.GO, version.Compiler, version.OS, version.Arch) + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + // add a wrong push worker token + cfgKafka.PushWorkerToken = "missing" + brk := 
brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/version", WrapMockAuthConfig(ListVersion, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func TestHandlersTestSuite(t *testing.T) { + log.SetOutput(ioutil.Discard) + suite.Run(t, new(HandlerTestSuite)) +} diff --git a/handlers/metrics.go b/handlers/metrics.go new file mode 100644 index 00000000..b853313c --- /dev/null +++ b/handlers/metrics.go @@ -0,0 +1,391 @@ +package handlers + +import ( + "encoding/json" + "fmt" + "github.com/ARGOeu/argo-messaging/auth" + "github.com/ARGOeu/argo-messaging/metrics" + "github.com/ARGOeu/argo-messaging/stores" + "github.com/ARGOeu/argo-messaging/subscriptions" + "github.com/ARGOeu/argo-messaging/topics" + gorillaContext "github.com/gorilla/context" + "github.com/gorilla/mux" + "net/http" + "strings" + "time" +) + +// OpMetrics (GET) all operational metrics +func OpMetrics(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Get Results Object + res, err := metrics.GetUsageCpuMem(refStr) + + if err != nil && err.Error() != "not found" { + err := APIErrQueryDatastore() + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := res.ExportJSON() + + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// VaMetrics (GET) retrieves metrics regrading projects, users, subscriptions, topics +func 
VaMetrics(w http.ResponseWriter, r *http.Request) { + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + startDate := time.Time{} + endDate := time.Time{} + var err error + + // if no start date was provided, set it to the start of the unix time + if r.URL.Query().Get("start_date") != "" { + startDate, err = time.Parse("2006-01-02", r.URL.Query().Get("start_date")) + if err != nil { + err := APIErrorInvalidData("Start date is not in valid format") + respondErr(w, err) + return + } + } else { + startDate = time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC) + } + + // if no end date was provided, set it to to today + if r.URL.Query().Get("end_date") != "" { + endDate, err = time.Parse("2006-01-02", r.URL.Query().Get("end_date")) + if err != nil { + err := APIErrorInvalidData("End date is not in valid format") + respondErr(w, err) + return + } + } else { + endDate = time.Now().UTC() + } + + if startDate.After(endDate) { + err := APIErrorInvalidData("Start date cannot be after the end date") + respondErr(w, err) + return + } + + projectsList := make([]string, 0) + projectsUrlValue := r.URL.Query().Get("projects") + if projectsUrlValue != "" { + projectsList = strings.Split(projectsUrlValue, ",") + } + + vr, err := metrics.GetVAReport(projectsList, startDate, endDate, refStr) + if err != nil { + err := APIErrorNotFound(err.Error()) + respondErr(w, err) + return + } + + output, err := json.MarshalIndent(vr, "", " ") + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + respondOK(w, output) +} + +// ProjectMetrics (GET) metrics for one project (number of topics) +func ProjectMetrics(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the 
response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + //refRoles := gorillaContext.Get(r, "auth_roles").([]string) + //refUser := gorillaContext.Get(r, "auth_user").(string) + //refAuthResource := gorillaContext.Get(r, "auth_resource").(bool) + + urlProject := urlVars["project"] + + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + // Check Authorization per topic + // - if enabled in config + // - if user has only publisher role + + numTopics := int64(0) + numSubs := int64(0) + + numTopics2, err2 := metrics.GetProjectTopics(projectUUID, refStr) + if err2 != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + numTopics = numTopics2 + numSubs2, err2 := metrics.GetProjectSubs(projectUUID, refStr) + if err2 != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + numSubs = numSubs2 + + var timePoints []metrics.Timepoint + var err error + + if timePoints, err = metrics.GetDailyProjectMsgCount(projectUUID, refStr); err != nil { + err := APIErrGenericBackend() + respondErr(w, err) + return + } + + m1 := metrics.NewProjectTopics(urlProject, numTopics, metrics.GetTimeNowZulu()) + m2 := metrics.NewProjectSubs(urlProject, numSubs, metrics.GetTimeNowZulu()) + res := metrics.NewMetricList(m1) + res.Metrics = append(res.Metrics, m2) + + // ProjectUUID User topics aggregation + m3, err := metrics.AggrProjectUserTopics(projectUUID, refStr) + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + for _, item := range m3.Metrics { + res.Metrics = append(res.Metrics, item) + } + + // ProjectUUID User subscriptions aggregation + m4, err := metrics.AggrProjectUserSubs(projectUUID, refStr) + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + 
} + + for _, item := range m4.Metrics { + res.Metrics = append(res.Metrics, item) + } + + m5 := metrics.NewDailyProjectMsgCount(urlProject, timePoints) + res.Metrics = append(res.Metrics, m5) + + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// TopicMetrics (GET) metrics for one topic +func TopicMetrics(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refRoles := gorillaContext.Get(r, "auth_roles").([]string) + refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) + refAuthResource := gorillaContext.Get(r, "auth_resource").(bool) + + urlTopic := urlVars["topic"] + + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + // Check Authorization per topic + // - if enabled in config + // - if user has only publisher role + + if refAuthResource && auth.IsPublisher(refRoles) { + + if auth.PerResource(projectUUID, "topics", urlTopic, refUserUUID, refStr) == false { + err := APIErrorForbidden() + respondErr(w, err) + return + } + } + + // Number of bytes and number of messages + resultsMsg, err := topics.FindMetric(projectUUID, urlTopic, refStr) + + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("Topic") + respondErr(w, err) + return + } + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + numMsg := resultsMsg.MsgNum + numBytes := resultsMsg.TotalBytes + + numSubs := int64(0) + numSubs, err = metrics.GetProjectSubsByTopic(projectUUID, urlTopic, refStr) + if 
err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("Topic") + respondErr(w, err) + return + } + err := APIErrGenericBackend() + respondErr(w, err) + return + } + + var timePoints []metrics.Timepoint + if timePoints, err = metrics.GetDailyTopicMsgCount(projectUUID, urlTopic, refStr); err != nil { + err := APIErrGenericBackend() + respondErr(w, err) + return + } + + m1 := metrics.NewTopicSubs(urlTopic, numSubs, metrics.GetTimeNowZulu()) + res := metrics.NewMetricList(m1) + + m2 := metrics.NewTopicMsgs(urlTopic, numMsg, metrics.GetTimeNowZulu()) + m3 := metrics.NewTopicBytes(urlTopic, numBytes, metrics.GetTimeNowZulu()) + m4 := metrics.NewDailyTopicMsgCount(urlTopic, timePoints) + m5 := metrics.NewTopicRate(urlTopic, resultsMsg.PublishRate, resultsMsg.LatestPublish.Format("2006-01-02T15:04:05Z")) + + res.Metrics = append(res.Metrics, m2, m3, m4, m5) + + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// SubMetrics (GET) metrics for one subscription +func SubMetrics(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refRoles := gorillaContext.Get(r, "auth_roles").([]string) + refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) + refAuthResource := gorillaContext.Get(r, "auth_resource").(bool) + + urlSub := urlVars["subscription"] + + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + // Check Authorization per topic + // - if enabled in config + // - if user has only publisher role + + if 
refAuthResource && auth.IsConsumer(refRoles) { + + if auth.PerResource(projectUUID, "subscriptions", urlSub, refUserUUID, refStr) == false { + err := APIErrorForbidden() + respondErr(w, err) + return + } + } + + resultMsg, err := subscriptions.FindMetric(projectUUID, urlSub, refStr) + + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("Subscription") + respondErr(w, err) + return + } + err := APIErrGenericBackend() + respondErr(w, err) + } + + numMsg := resultMsg.MsgNum + numBytes := resultMsg.TotalBytes + + m1 := metrics.NewSubMsgs(urlSub, numMsg, metrics.GetTimeNowZulu()) + res := metrics.NewMetricList(m1) + m2 := metrics.NewSubBytes(urlSub, numBytes, metrics.GetTimeNowZulu()) + m3 := metrics.NewSubRate(urlSub, resultMsg.ConsumeRate, resultMsg.LatestConsume.Format("2006-01-02T15:04:05Z")) + + res.Metrics = append(res.Metrics, m2, m3) + + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} diff --git a/handlers/metrics_test.go b/handlers/metrics_test.go new file mode 100644 index 00000000..a5102472 --- /dev/null +++ b/handlers/metrics_test.go @@ -0,0 +1,718 @@ +package handlers + +import ( + "github.com/ARGOeu/argo-messaging/brokers" + "github.com/ARGOeu/argo-messaging/config" + "github.com/ARGOeu/argo-messaging/metrics" + oldPush "github.com/ARGOeu/argo-messaging/push" + "github.com/ARGOeu/argo-messaging/stores" + "github.com/gorilla/mux" + log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/suite" + "io/ioutil" + "net/http" + "net/http/httptest" + "strconv" + "strings" + "testing" +) + +type MetricsHandlersTestSuite struct { + suite.Suite + cfgStr string +} + +func (suite *MetricsHandlersTestSuite) SetupTest() { + suite.cfgStr = `{ + "bind_ip":"", + "port":8080, + "zookeeper_hosts":["localhost"], + "kafka_znode":"", + "store_host":"localhost", + 
"store_db":"argo_msg", + "certificate":"/etc/pki/tls/certs/localhost.crt", + "certificate_key":"/etc/pki/tls/private/localhost.key", + "per_resource_auth":"true", + "push_enabled": "true", + "push_worker_token": "push_token" + }` +} + +func (suite *MetricsHandlersTestSuite) TestProjectMessageCount() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/metrics/va_metrics?start_date=2018-10-01&end_date=2018-10-04", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "projects_metrics": { + "projects": [ + { + "project": "ARGO", + "message_count": 30, + "average_daily_messages": 7 + } + ], + "total_message_count": 30, + "average_daily_messages": 7 + }, + "users_count": 0, + "topics_count": 0, + "subscriptions_count": 0 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/metrics/va_metrics", WrapMockAuthConfig(VaMetrics, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *MetricsHandlersTestSuite) TestVaReportFull() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/metrics/va_metrics?start_date=2007-10-01&end_date=2020-11-24", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "projects_metrics": { + "projects": [ + { + "project": "ARGO", + "message_count": 140, + "average_daily_messages": 0 + } + ], + "total_message_count": 140, + "average_daily_messages": 0 + }, + "users_count": 9, + "topics_count": 4, + "subscriptions_count": 4 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + 
router.HandleFunc("/v1/metrics/va_metrics", WrapMockAuthConfig(VaMetrics, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *MetricsHandlersTestSuite) TestProjectMessageCountErrors() { + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects-message-count", WrapMockAuthConfig(VaMetrics, cfgKafka, &brk, str, &mgr, nil)) + + // wrong start date + expResp1 := `{ + "error": { + "code": 400, + "message": "Start date is not in valid format", + "status": "INVALID_ARGUMENT" + } +}` + req1, err := http.NewRequest("GET", "http://localhost:8080/v1/projects-message-count?start_date=ffff", nil) + if err != nil { + log.Fatal(err) + } + router.ServeHTTP(w, req1) + suite.Equal(400, w.Code) + suite.Equal(expResp1, w.Body.String()) + w.Body.Reset() + + // wrong end date + expResp2 := `{ + "error": { + "code": 400, + "message": "End date is not in valid format", + "status": "INVALID_ARGUMENT" + } +}` + req2, err := http.NewRequest("GET", "http://localhost:8080/v1/projects-message-count?end_date=ffff", nil) + if err != nil { + log.Fatal(err) + } + router.ServeHTTP(w, req2) + suite.Equal(400, w.Code) + suite.Equal(expResp2, w.Body.String()) + w.Body.Reset() + + // one of the projects doesn't exist end date + expResp3 := `{ + "error": { + "code": 404, + "message": "Project ffff doesn't exist", + "status": "NOT_FOUND" + } +}` + req3, err := http.NewRequest("GET", "http://localhost:8080/v1/projects-message-count?projects=ARGO,ffff", nil) + if err != nil { + log.Fatal(err) + } + router.ServeHTTP(w, req3) + suite.Equal(400, w.Code) + suite.Equal(expResp3, w.Body.String()) + w.Body.Reset() + + // start date is off + expResp4 := `{ + "error": { + "code": 400, + 
"message": "Start date cannot be after the end date", + "status": "INVALID_ARGUMENT" + } +}` + req4, err := http.NewRequest("GET", "http://localhost:8080/v1/projects-message-count?start_date=2019-04-04&end_date=2018-01-01", nil) + if err != nil { + log.Fatal(err) + } + router.ServeHTTP(w, req4) + suite.Equal(400, w.Code) + suite.Equal(expResp4, w.Body.String()) +} + +func (suite *MetricsHandlersTestSuite) TestSubMetrics() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1:metrics", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "metrics": [ + { + "metric": "subscription.number_of_messages", + "metric_type": "counter", + "value_type": "int64", + "resource_type": "subscription", + "resource_name": "sub1", + "timeseries": [ + { + "timestamp": "{{TS1}}", + "value": 0 + } + ], + "description": "Counter that displays the number of messages consumed from the specific subscription" + }, + { + "metric": "subscription.number_of_bytes", + "metric_type": "counter", + "value_type": "int64", + "resource_type": "subscription", + "resource_name": "sub1", + "timeseries": [ + { + "timestamp": "{{TS2}}", + "value": 0 + } + ], + "description": "Counter that displays the total size of data (in bytes) consumed from the specific subscription" + }, + { + "metric": "subscription.consumption_rate", + "metric_type": "rate", + "value_type": "float64", + "resource_type": "subscription", + "resource_name": "sub1", + "timeseries": [ + { + "timestamp": "2019-05-06T00:00:00Z", + "value": 10 + } + ], + "description": "A rate that displays how many messages were consumed per second between the last two consume events" + } + ] +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + 
router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:metrics", WrapMockAuthConfig(SubMetrics, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + + metricOut, _ := metrics.GetMetricsFromJSON([]byte(w.Body.String())) + ts1 := metricOut.Metrics[0].Timeseries[0].Timestamp + ts2 := metricOut.Metrics[1].Timeseries[0].Timestamp + expResp = strings.Replace(expResp, "{{TS1}}", ts1, -1) + expResp = strings.Replace(expResp, "{{TS2}}", ts2, -1) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *MetricsHandlersTestSuite) TestSubMetricsNotFound() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions/unknown_sub:metrics", nil) + if err != nil { + log.Fatal(err) + } + + expRes := `{ + "error": { + "code": 404, + "message": "Subscription doesn't exist", + "status": "NOT_FOUND" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + // temporarily disable auth for this test case + cfgKafka.ResAuth = false + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:metrics", WrapMockAuthConfig(SubMetrics, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(404, w.Code) + suite.Equal(expRes, w.Body.String()) + +} + +func (suite *MetricsHandlersTestSuite) TestProjectMetrics() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO:metrics", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "metrics": [ + { + "metric": "project.number_of_topics", + "metric_type": "counter", + "value_type": "int64", + "resource_type": "project", + "resource_name": "ARGO", + "timeseries": [ + { + "timestamp": "{{TS1}}", + "value": 4 + } + ], + "description": "Counter that displays the number of topics belonging to the 
specific project" + }, + { + "metric": "project.number_of_subscriptions", + "metric_type": "counter", + "value_type": "int64", + "resource_type": "project", + "resource_name": "ARGO", + "timeseries": [ + { + "timestamp": "{{TS2}}", + "value": 4 + } + ], + "description": "Counter that displays the number of subscriptions belonging to the specific project" + }, + { + "metric": "project.user.number_of_topics", + "metric_type": "counter", + "value_type": "int64", + "resource_type": "project.user", + "resource_name": "ARGO.UserA", + "timeseries": [ + { + "timestamp": "{{TS3}}", + "value": 2 + } + ], + "description": "Counter that displays the number of topics that a user has access to the specific project" + }, + { + "metric": "project.user.number_of_topics", + "metric_type": "counter", + "value_type": "int64", + "resource_type": "project.user", + "resource_name": "ARGO.UserB", + "timeseries": [ + { + "timestamp": "{{TS4}}", + "value": 2 + } + ], + "description": "Counter that displays the number of topics that a user has access to the specific project" + }, + { + "metric": "project.user.number_of_topics", + "metric_type": "counter", + "value_type": "int64", + "resource_type": "project.user", + "resource_name": "ARGO.UserX", + "timeseries": [ + { + "timestamp": "{{TS5}}", + "value": 1 + } + ], + "description": "Counter that displays the number of topics that a user has access to the specific project" + }, + { + "metric": "project.user.number_of_topics", + "metric_type": "counter", + "value_type": "int64", + "resource_type": "project.user", + "resource_name": "ARGO.UserZ", + "timeseries": [ + { + "timestamp": "{{TS6}}", + "value": 1 + } + ], + "description": "Counter that displays the number of topics that a user has access to the specific project" + }, + { + "metric": "project.user.number_of_subscriptions", + "metric_type": "counter", + "value_type": "int64", + "resource_type": "project.user", + "resource_name": "ARGO.UserA", + "timeseries": [ + { + "timestamp": 
"{{TS7}}", + "value": 3 + } + ], + "description": "Counter that displays the number of subscriptions that a user has access to the specific project" + }, + { + "metric": "project.user.number_of_subscriptions", + "metric_type": "counter", + "value_type": "int64", + "resource_type": "project.user", + "resource_name": "ARGO.UserB", + "timeseries": [ + { + "timestamp": "{{TS8}}", + "value": 3 + } + ], + "description": "Counter that displays the number of subscriptions that a user has access to the specific project" + }, + { + "metric": "project.user.number_of_subscriptions", + "metric_type": "counter", + "value_type": "int64", + "resource_type": "project.user", + "resource_name": "ARGO.UserX", + "timeseries": [ + { + "timestamp": "{{TS9}}", + "value": 1 + } + ], + "description": "Counter that displays the number of subscriptions that a user has access to the specific project" + }, + { + "metric": "project.user.number_of_subscriptions", + "metric_type": "counter", + "value_type": "int64", + "resource_type": "project.user", + "resource_name": "ARGO.UserZ", + "timeseries": [ + { + "timestamp": "{{TS10}}", + "value": 2 + } + ], + "description": "Counter that displays the number of subscriptions that a user has access to the specific project" + }, + { + "metric": "project.number_of_daily_messages", + "metric_type": "counter", + "value_type": "int64", + "resource_type": "project", + "resource_name": "ARGO", + "timeseries": [ + { + "timestamp": "{{TS11}}", + "value": 30 + }, + { + "timestamp": "{{TS12}}", + "value": 110 + } + ], + "description": "A collection of counters that represents the total number of messages published each day to all of the project's topics" + } + ] +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + 
router.HandleFunc("/v1/projects/{project}:metrics", WrapMockAuthConfig(ProjectMetrics, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + metricOut, _ := metrics.GetMetricsFromJSON([]byte(w.Body.String())) + ts1 := metricOut.Metrics[0].Timeseries[0].Timestamp + ts2 := metricOut.Metrics[1].Timeseries[0].Timestamp + ts3 := metricOut.Metrics[2].Timeseries[0].Timestamp + ts4 := metricOut.Metrics[3].Timeseries[0].Timestamp + ts5 := metricOut.Metrics[4].Timeseries[0].Timestamp + ts6 := metricOut.Metrics[5].Timeseries[0].Timestamp + ts7 := metricOut.Metrics[6].Timeseries[0].Timestamp + ts8 := metricOut.Metrics[7].Timeseries[0].Timestamp + ts9 := metricOut.Metrics[8].Timeseries[0].Timestamp + ts10 := metricOut.Metrics[9].Timeseries[0].Timestamp + ts11 := metricOut.Metrics[10].Timeseries[0].Timestamp + ts12 := metricOut.Metrics[10].Timeseries[1].Timestamp + expResp = strings.Replace(expResp, "{{TS1}}", ts1, -1) + expResp = strings.Replace(expResp, "{{TS2}}", ts2, -1) + expResp = strings.Replace(expResp, "{{TS3}}", ts3, -1) + expResp = strings.Replace(expResp, "{{TS4}}", ts4, -1) + expResp = strings.Replace(expResp, "{{TS5}}", ts5, -1) + expResp = strings.Replace(expResp, "{{TS6}}", ts6, -1) + expResp = strings.Replace(expResp, "{{TS7}}", ts7, -1) + expResp = strings.Replace(expResp, "{{TS8}}", ts8, -1) + expResp = strings.Replace(expResp, "{{TS9}}", ts9, -1) + expResp = strings.Replace(expResp, "{{TS10}}", ts10, -1) + expResp = strings.Replace(expResp, "{{TS11}}", ts11, -1) + expResp = strings.Replace(expResp, "{{TS12}}", ts12, -1) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *MetricsHandlersTestSuite) TestOpMetrics() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/metrics", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "metrics": [ + { + "metric": "ams_node.cpu_usage", + "metric_type": "percentage", + "value_type": "float64", + "resource_type": "ams_node", + "resource_name": 
"{{HOST}}", + "timeseries": [ + { + "timestamp": "{{TS1}}", + "value": {{VAL1}} + } + ], + "description": "Percentage value that displays the CPU usage of ams service in the specific node" + }, + { + "metric": "ams_node.memory_usage", + "metric_type": "percentage", + "value_type": "float64", + "resource_type": "ams_node", + "resource_name": "{{HOST}}", + "timeseries": [ + { + "timestamp": "{{TS1}}", + "value": {{VAL2}} + } + ], + "description": "Percentage value that displays the Memory usage of ams service in the specific node" + } + ] +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/metrics", WrapMockAuthConfig(OpMetrics, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + metricOut, _ := metrics.GetMetricsFromJSON([]byte(w.Body.String())) + ts1 := metricOut.Metrics[0].Timeseries[0].Timestamp + val1 := metricOut.Metrics[0].Timeseries[0].Value.(float64) + ts2 := metricOut.Metrics[1].Timeseries[0].Timestamp + val2 := metricOut.Metrics[1].Timeseries[0].Value.(float64) + host := metricOut.Metrics[0].Resource + expResp = strings.Replace(expResp, "{{TS1}}", ts1, -1) + expResp = strings.Replace(expResp, "{{TS2}}", ts2, -1) + expResp = strings.Replace(expResp, "{{VAL1}}", strconv.FormatFloat(val1, 'g', 1, 64), -1) + expResp = strings.Replace(expResp, "{{VAL2}}", strconv.FormatFloat(val2, 'g', 1, 64), -1) + expResp = strings.Replace(expResp, "{{HOST}}", host, -1) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *MetricsHandlersTestSuite) TestTopicMetrics() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics/topic1:metrics", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "metrics": [ + { + "metric": "topic.number_of_subscriptions", + 
"metric_type": "counter", + "value_type": "int64", + "resource_type": "topic", + "resource_name": "topic1", + "timeseries": [ + { + "timestamp": "{{TIMESTAMP1}}", + "value": 1 + } + ], + "description": "Counter that displays the number of subscriptions belonging to a specific topic" + }, + { + "metric": "topic.number_of_messages", + "metric_type": "counter", + "value_type": "int64", + "resource_type": "topic", + "resource_name": "topic1", + "timeseries": [ + { + "timestamp": "{{TIMESTAMP2}}", + "value": 0 + } + ], + "description": "Counter that displays the number of messages published to the specific topic" + }, + { + "metric": "topic.number_of_bytes", + "metric_type": "counter", + "value_type": "int64", + "resource_type": "topic", + "resource_name": "topic1", + "timeseries": [ + { + "timestamp": "{{TIMESTAMP3}}", + "value": 0 + } + ], + "description": "Counter that displays the total size of data (in bytes) published to the specific topic" + }, + { + "metric": "topic.number_of_daily_messages", + "metric_type": "counter", + "value_type": "int64", + "resource_type": "topic", + "resource_name": "topic1", + "timeseries": [ + { + "timestamp": "{{TIMESTAMP4}}", + "value": 30 + }, + { + "timestamp": "{{TIMESTAMP5}}", + "value": 40 + } + ], + "description": "A collection of counters that represents the total number of messages published each day to a specific topic" + }, + { + "metric": "topic.publishing_rate", + "metric_type": "rate", + "value_type": "float64", + "resource_type": "topic", + "resource_name": "topic1", + "timeseries": [ + { + "timestamp": "2019-05-06T00:00:00Z", + "value": 10 + } + ], + "description": "A rate that displays how many messages were published per second between the last two publish events" + } + ] +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := 
oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}:metrics", WrapMockAuthConfig(TopicMetrics, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + metricOut, _ := metrics.GetMetricsFromJSON([]byte(w.Body.String())) + ts1 := metricOut.Metrics[0].Timeseries[0].Timestamp + ts2 := metricOut.Metrics[1].Timeseries[0].Timestamp + ts3 := metricOut.Metrics[2].Timeseries[0].Timestamp + ts4 := metricOut.Metrics[3].Timeseries[0].Timestamp + ts5 := metricOut.Metrics[3].Timeseries[1].Timestamp + expResp = strings.Replace(expResp, "{{TIMESTAMP1}}", ts1, -1) + expResp = strings.Replace(expResp, "{{TIMESTAMP2}}", ts2, -1) + expResp = strings.Replace(expResp, "{{TIMESTAMP3}}", ts3, -1) + expResp = strings.Replace(expResp, "{{TIMESTAMP4}}", ts4, -1) + expResp = strings.Replace(expResp, "{{TIMESTAMP5}}", ts5, -1) + + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *MetricsHandlersTestSuite) TestTopicMetricsNotFound() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics/topic_not_found:metrics", nil) + if err != nil { + log.Fatal(err) + } + + expRes := `{ + "error": { + "code": 404, + "message": "Topic doesn't exist", + "status": "NOT_FOUND" + } +}` + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + // deactivate auth for this specific test case + cfgKafka.ResAuth = false + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}:metrics", WrapMockAuthConfig(TopicMetrics, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(404, w.Code) + suite.Equal(expRes, w.Body.String()) + +} + +func TestMetricsHandlersTestSuite(t *testing.T) { + log.SetOutput(ioutil.Discard) + suite.Run(t, new(MetricsHandlersTestSuite)) +} diff --git a/handlers/projects.go 
b/handlers/projects.go new file mode 100644 index 00000000..7566dee2 --- /dev/null +++ b/handlers/projects.go @@ -0,0 +1,876 @@ +package handlers + +import ( + "encoding/json" + "fmt" + "github.com/ARGOeu/argo-messaging/auth" + "github.com/ARGOeu/argo-messaging/projects" + "github.com/ARGOeu/argo-messaging/stores" + gorillaContext "github.com/gorilla/context" + "github.com/gorilla/mux" + log "github.com/sirupsen/logrus" + "github.com/twinj/uuid" + "io/ioutil" + "net/http" + "strconv" + "strings" + "time" +) + +// ProjectDelete (DEL) deletes an existing project (also removes it's topics and subscriptions) +func ProjectDelete(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Get Result Object + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + // RemoveProject removes also attached subs and topics from the datastore + err := projects.RemoveProject(projectUUID, refStr) + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("ProjectUUID") + respondErr(w, err) + return + } + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // Write empty response if anything ok + respondOK(w, output) +} + +// ProjectUpdate (PUT) updates the name or the description of an existing project +func ProjectUpdate(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab context references + refStr := gorillaContext.Get(r, 
"str").(stores.Store) + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + // Read POST JSON body + body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // Parse pull options + postBody, err := projects.GetFromJSON(body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + log.Error(string(body[:])) + return + } + + modified := time.Now().UTC() + // Get Result Object + + res, err := projects.UpdateProject(projectUUID, postBody.Name, postBody.Description, modified, refStr) + + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("ProjectUUID") + respondErr(w, err) + return + } + + if strings.HasPrefix(err.Error(), "invalid") { + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// ProjectCreate (POST) creates a new project +func ProjectCreate(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + urlProject := urlVars["project"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) + + // Read POST JSON body + body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // Parse pull options + postBody, err := projects.GetFromJSON(body) + if err != nil { + err := 
APIErrorInvalidArgument("Project") + respondErr(w, err) + return + } + + uuid := uuid.NewV4().String() // generate a new uuid to attach to the new project + created := time.Now().UTC() + // Get Result Object + + res, err := projects.CreateProject(uuid, urlProject, created, refUserUUID, postBody.Description, refStr) + + if err != nil { + if err.Error() == "exists" { + err := APIErrorConflict("Project") + respondErr(w, err) + return + } + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// ProjectListAll (GET) all projects +func ProjectListAll(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Get Results Object + + res, err := projects.Find("", "", refStr) + + if err != nil && err.Error() != "not found" { + err := APIErrQueryDatastore() + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := res.ExportJSON() + + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// ProjectListOne (GET) one project +func ProjectListOne(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + urlProject := urlVars["project"] + + // 
Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Get Results Object + results, err := projects.Find("", urlProject, refStr) + + if err != nil { + + if err.Error() == "not found" { + err := APIErrorNotFound("ProjectUUID") + respondErr(w, err) + return + } + err := APIErrQueryDatastore() + respondErr(w, err) + return + } + + // Output result to JSON + res := results.One() + resJSON, err := res.ExportJSON() + + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// ProjectUserListOne (GET) one user member of a specific project +func ProjectUserListOne(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + urlUser := urlVars["user"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refRoles := gorillaContext.Get(r, "auth_roles").([]string) + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + // check that user is indeed a service admin in order to be priviledged to see full user info + priviledged := auth.IsServiceAdmin(refRoles) + + // Get Results Object + results, err := auth.FindUsers(projectUUID, "", urlUser, priviledged, refStr) + + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("User") + respondErr(w, err) + return + } + + err := APIErrQueryDatastore() + respondErr(w, err) + return + } + + res := results.One() + + // Output result to JSON + resJSON, err := res.ExportJSON() + + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// ProjectUserCreate (POST) creates 
a user under the respective project by the project's admin +func ProjectUserCreate(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + urlUser := urlVars["user"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) + refProjUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + // Read POST JSON body + body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // Parse pull options + postBody, err := auth.GetUserFromJSON(body) + if err != nil { + err := APIErrorInvalidArgument("User") + respondErr(w, err) + log.Error(string(body[:])) + return + } + + // omit service wide roles + postBody.ServiceRoles = []string{} + + // allow the user to be created to only have reference to the project under which is being created + prName := projects.GetNameByUUID(refProjUUID, refStr) + if prName == "" { + err := APIErrGenericInternal("Internal Error") + respondErr(w, err) + return + } + + projectRoles := auth.ProjectRoles{} + + for _, p := range postBody.Projects { + if p.Project == prName { + projectRoles.Project = prName + projectRoles.Roles = p.Roles + projectRoles.Topics = p.Topics + projectRoles.Subs = p.Subs + break + } + } + + // if the project was not mentioned in the creation, add it + if projectRoles.Project == "" { + projectRoles.Project = prName + } + + postBody.Projects = []auth.ProjectRoles{projectRoles} + + uuid := uuid.NewV4().String() // generate a new uuid to attach to the new project + token, err := auth.GenToken() // generate a new user token + created := time.Now().UTC() + + // Get Result Object + res, err 
:= auth.CreateUser(uuid, urlUser, "", "", "", "", postBody.Projects, token, postBody.Email, postBody.ServiceRoles, created, refUserUUID, refStr) + + if err != nil { + if err.Error() == "exists" { + err := APIErrorConflict("User") + respondErr(w, err) + return + } + + if strings.HasPrefix(err.Error(), "invalid") { + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + + if strings.HasPrefix(err.Error(), "duplicate") { + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// ProjectUserUpdate (PUT) updates a user under the respective project by the project's admin +func ProjectUserUpdate(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + urlUser := urlVars["user"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refProjUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + refRoles := gorillaContext.Get(r, "auth_roles").([]string) + + // allow the user to be updated to only have reference to the project under which is being updated + prName := projects.GetNameByUUID(refProjUUID, refStr) + if prName == "" { + err := APIErrGenericInternal("Internal Error") + respondErr(w, err) + return + } + + // Read POST JSON body + body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // Parse pull options + postBody, err := 
auth.GetUserFromJSON(body) + if err != nil { + err := APIErrorInvalidArgument("User") + respondErr(w, err) + return + } + + u, err := auth.FindUsers("", "", urlUser, true, refStr) + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("User") + respondErr(w, err) + return + } + + err := APIErrQueryDatastore() + respondErr(w, err) + return + } + + // from the post request keep only the reference to the current project + projectRoles := auth.ProjectRoles{} + + for _, p := range postBody.Projects { + if p.Project == prName { + projectRoles.Project = prName + projectRoles.Roles = p.Roles + projectRoles.Topics = p.Topics + projectRoles.Subs = p.Subs + break + } + } + + // if the user is already a member of the project, update it with the accepted contents of the post body + found := false + for idx, p := range u.One().Projects { + if p.Project == projectRoles.Project { + u.One().Projects[idx].Roles = projectRoles.Roles + u.One().Projects[idx].Topics = projectRoles.Topics + u.One().Projects[idx].Subs = projectRoles.Subs + found = true + break + } + } + + if !found { + err := APIErrorForbiddenWithMsg("User is not a member of the project") + respondErr(w, err) + return + } + + // check that user is indeed a service admin in order to be privileged to see full user info + privileged := auth.IsServiceAdmin(refRoles) + + // Get Result Object + userUUID := u.One().UUID + modified := time.Now().UTC() + userProjects := u.One().Projects + userEmail := u.One().Email + userSRoles := u.One().ServiceRoles + userName := u.One().Name + userFN := u.One().FirstName + userLN := u.One().LastName + userOrg := u.One().Organization + userDesc := u.One().Description + + _, err = auth.UpdateUser(userUUID, userFN, userLN, userOrg, userDesc, userName, userProjects, userEmail, userSRoles, modified, false, refStr) + + if err != nil { + + // In case of invalid project or role in post body + if err.Error() == "not found" { + err := APIErrorNotFound("User") + respondErr(w, err) + 
return + } + + if strings.HasPrefix(err.Error(), "invalid") { + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + + if strings.HasPrefix(err.Error(), "duplicate") { + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + stored, err := auth.FindUsers(refProjUUID, userUUID, urlUser, privileged, refStr) + + if err != nil { + + if err.Error() == "not found" { + err := APIErrorNotFound("User") + respondErr(w, err) + return + } + + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := json.MarshalIndent(stored.One(), "", " ") + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// ProjectUserRemove (POST) removes a user from the respective project +func ProjectUserRemove(w http.ResponseWriter, r *http.Request) { + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + urlUser := urlVars["user"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refProjUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + projName := projects.GetNameByUUID(refProjUUID, refStr) + + u, err := auth.FindUsers("", "", urlUser, true, refStr) + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("User") + respondErr(w, err) + return + } + + err := APIErrQueryDatastore() + respondErr(w, err) + return + } + + userProjects := []auth.ProjectRoles{} + + // if the user is already a member of the project, update it with the accepted contents of the post body + found := false + for idx, p := range u.One().Projects { + if p.Project == projName { + 
userProjects = append(userProjects, u.One().Projects[:idx]...) + userProjects = append(userProjects, u.One().Projects[idx+1:]...) + found = true + break + } + } + + if !found { + err := APIErrorForbiddenWithMsg("User is not a member of the project") + respondErr(w, err) + return + } + + // Get Result Object + userUUID := u.One().UUID + modified := time.Now().UTC() + userEmail := u.One().Email + userSRoles := u.One().ServiceRoles + userName := u.One().Name + userFN := u.One().FirstName + userLN := u.One().LastName + userOrg := u.One().Organization + userDesc := u.One().Description + + _, err = auth.UpdateUser(userUUID, userFN, userLN, userOrg, userDesc, userName, userProjects, userEmail, userSRoles, modified, false, refStr) + + if err != nil { + + // In case of invalid project or role in post body + if err.Error() == "not found" { + err := APIErrorNotFound("User") + respondErr(w, err) + return + } + + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // Write response + respondOK(w, []byte("{}")) +} + +// ProjectUserAdd (POST) adds a user to the respective project +func ProjectUserAdd(w http.ResponseWriter, r *http.Request) { + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + urlUser := urlVars["user"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refProjUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + refRoles := gorillaContext.Get(r, "auth_roles").([]string) + + projName := projects.GetNameByUUID(refProjUUID, refStr) + + u, err := auth.FindUsers("", "", urlUser, true, refStr) + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("User") + respondErr(w, err) + return + } + + err := APIErrQueryDatastore() + respondErr(w, err) + return + } + + // Read POST JSON body 
+ body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + data := auth.ProjectRoles{} + + err = json.Unmarshal(body, &data) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // check if the user is already a user of the project + found := false + for _, p := range u.One().Projects { + if p.Project == projName { + found = true + break + } + } + + if found { + err := APIErrorGenericConflict("User is already a member of the project") + respondErr(w, err) + return + } + + // Get Result Object + userUUID := u.One().UUID + modified := time.Now().UTC() + userEmail := u.One().Email + userSRoles := u.One().ServiceRoles + userName := u.One().Name + userProjects := u.One().Projects + userFN := u.One().FirstName + userLN := u.One().LastName + userOrg := u.One().Organization + userDesc := u.One().Description + + userProjects = append(userProjects, auth.ProjectRoles{ + Project: projName, + Roles: data.Roles, + Subs: data.Subs, + Topics: data.Topics, + }) + + _, err = auth.UpdateUser(userUUID, userFN, userLN, userOrg, userDesc, userName, userProjects, userEmail, userSRoles, modified, false, refStr) + + if err != nil { + + // In case of invalid project or role in post body + if err.Error() == "not found" { + err := APIErrorNotFound("User") + respondErr(w, err) + return + } + + if strings.HasPrefix(err.Error(), "invalid") { + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // Write response + privileged := auth.IsServiceAdmin(refRoles) + fmt.Println(privileged) + results, err := auth.FindUsers(refProjUUID, "", urlUser, privileged, refStr) + + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("User") + respondErr(w, err) + return + } + + err := APIErrQueryDatastore() + respondErr(w, err) + return + } + + res := results.One() + + // 
Output result to JSON + resJSON, err := res.ExportJSON() + + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + respondOK(w, []byte(resJSON)) +} + +// ProjectListUsers (GET) all users belonging to a project +func ProjectListUsers(w http.ResponseWriter, r *http.Request) { + + var err error + var pageSize int + var paginatedUsers auth.PaginatedUsers + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refRoles := gorillaContext.Get(r, "auth_roles").([]string) + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + // Grab url path variables + urlValues := r.URL.Query() + pageToken := urlValues.Get("pageToken") + strPageSize := urlValues.Get("pageSize") + details := urlValues.Get("details") + usersDetailedView := false + + if details == "true" { + usersDetailedView = true + } + + if strPageSize != "" { + if pageSize, err = strconv.Atoi(strPageSize); err != nil { + log.Errorf("Pagesize %v produced an error while being converted to int: %v", strPageSize, err.Error()) + err := APIErrorInvalidData("Invalid page size") + respondErr(w, err) + return + } + } + + // check that user is indeed a service admin in order to be priviledged to see full user info + priviledged := auth.IsServiceAdmin(refRoles) + + // Get Results Object - call is always priviledged because this handler is only accessible by service admins + if paginatedUsers, err = auth.PaginatedFindUsers(pageToken, int32(pageSize), projectUUID, priviledged, usersDetailedView, refStr); err != nil { + err := APIErrorInvalidData("Invalid page token") + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := paginatedUsers.ExportJSON() + + if err != nil { + err := 
APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} diff --git a/handlers/projects_test.go b/handlers/projects_test.go new file mode 100644 index 00000000..4e13b4a8 --- /dev/null +++ b/handlers/projects_test.go @@ -0,0 +1,1370 @@ +package handlers + +import ( + "bytes" + "fmt" + "github.com/ARGOeu/argo-messaging/auth" + "github.com/ARGOeu/argo-messaging/brokers" + "github.com/ARGOeu/argo-messaging/config" + "github.com/ARGOeu/argo-messaging/projects" + oldPush "github.com/ARGOeu/argo-messaging/push" + push "github.com/ARGOeu/argo-messaging/push/grpc/client" + "github.com/ARGOeu/argo-messaging/stores" + "github.com/gorilla/mux" + "github.com/stretchr/testify/suite" + "io/ioutil" + "log" + "net/http" + "net/http/httptest" + "strings" + "testing" +) + +type ProjectsHandlersTestSuite struct { + suite.Suite + cfgStr string +} + +func (suite *ProjectsHandlersTestSuite) SetupTest() { + suite.cfgStr = `{ + "bind_ip":"", + "port":8080, + "zookeeper_hosts":["localhost"], + "kafka_znode":"", + "store_host":"localhost", + "store_db":"argo_msg", + "certificate":"/etc/pki/tls/certs/localhost.crt", + "certificate_key":"/etc/pki/tls/private/localhost.key", + "per_resource_auth":"true", + "push_enabled": "true", + "push_worker_token": "push_token" + }` +} + +func (suite *ProjectsHandlersTestSuite) TestProjectUserListOne() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/members/UserZ", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "uuid": "uuid4", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [ + "topic2" + ], + "subscriptions": [ + "sub3", + "sub4" + ] + } + ], + "name": "UserZ", + "token": "S3CR3T4", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" +}` + + cfgKafka := config.NewAPICfg() + 
cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/members/{user}", WrapMockAuthConfig(ProjectUserListOne, cfgKafka, &brk, str, &mgr, nil, "service_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *ProjectsHandlersTestSuite) TestProjectUserListOneUnpriv() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/members/UserZ", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "uuid": "uuid4", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [ + "topic2" + ], + "subscriptions": [ + "sub3", + "sub4" + ] + } + ], + "name": "UserZ", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/members/{user}", WrapMockAuthConfig(ProjectUserListOne, cfgKafka, &brk, str, &mgr, nil, "project_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *ProjectsHandlersTestSuite) TestProjectUserListARGO() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/users?details=true", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "users": [ + { + "uuid": "same_uuid", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "UserSame2", + "token": 
"S3CR3T42", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + }, + { + "uuid": "same_uuid", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "UserSame1", + "token": "S3CR3T41", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + }, + { + "uuid": "uuid4", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [ + "topic2" + ], + "subscriptions": [ + "sub3", + "sub4" + ] + } + ], + "name": "UserZ", + "token": "S3CR3T4", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + }, + { + "uuid": "uuid3", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [ + "topic3" + ], + "subscriptions": [ + "sub2" + ] + } + ], + "name": "UserX", + "token": "S3CR3T3", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + }, + { + "uuid": "uuid2", + "projects": [ + { + "project": "ARGO", + "roles": [ + "consumer", + "publisher" + ], + "topics": [ + "topic1", + "topic2" + ], + "subscriptions": [ + "sub1", + "sub3", + "sub4" + ] + } + ], + "name": "UserB", + "token": "S3CR3T2", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + }, + { + "uuid": "uuid1", + "projects": [ + { + "project": "ARGO", + "roles": [ + "consumer", + "publisher" + ], + "topics": [ + "topic1", + "topic2" + ], + "subscriptions": [ + "sub1", + "sub2", + "sub3" + ] + } + ], + "name": "UserA", + "first_name": "FirstA", + "last_name": 
"LastA", + "organization": "OrgA", + "description": "DescA", + "token": "S3CR3T1", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + }, + { + "uuid": "uuid0", + "projects": [ + { + "project": "ARGO", + "roles": [ + "consumer", + "publisher" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "Test", + "token": "S3CR3T", + "email": "Test@test.com", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + } + ], + "nextPageToken": "", + "totalSize": 7 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/users", WrapMockAuthConfig(ProjectListUsers, cfgKafka, &brk, str, &mgr, nil, "service_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *ProjectsHandlersTestSuite) TestProjectUserListARGONoUserDetails() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/users?details=false&pageSize=1", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "users": [ + { + "uuid": "same_uuid", + "name": "UserSame2", + "token": "S3CR3T42", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + } + ], + "nextPageToken": "NQ==", + "totalSize": 7 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/users", WrapMockAuthConfig(ProjectListUsers, 
cfgKafka, &brk, str, &mgr, nil, "service_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *ProjectsHandlersTestSuite) TestProjectUserListUnprivARGO() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/members?details=true", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "users": [ + { + "uuid": "same_uuid", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "UserSame2", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + }, + { + "uuid": "same_uuid", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "UserSame1", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + }, + { + "uuid": "uuid4", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [ + "topic2" + ], + "subscriptions": [ + "sub3", + "sub4" + ] + } + ], + "name": "UserZ", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + }, + { + "uuid": "uuid3", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [ + "topic3" + ], + "subscriptions": [ + "sub2" + ] + } + ], + "name": "UserX", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + }, + { + "uuid": "uuid2", + "projects": [ + { + "project": "ARGO", + "roles": [ + "consumer", + "publisher" + ], + "topics": [ + "topic1", + "topic2" + ], + "subscriptions": [ + "sub1", + "sub3", + "sub4" + ] + } + ], + "name": "UserB", + "email": "foo-email", + "service_roles": [], + 
"created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + }, + { + "uuid": "uuid1", + "projects": [ + { + "project": "ARGO", + "roles": [ + "consumer", + "publisher" + ], + "topics": [ + "topic1", + "topic2" + ], + "subscriptions": [ + "sub1", + "sub2", + "sub3" + ] + } + ], + "name": "UserA", + "first_name": "FirstA", + "last_name": "LastA", + "organization": "OrgA", + "description": "DescA", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + }, + { + "uuid": "uuid0", + "projects": [ + { + "project": "ARGO", + "roles": [ + "consumer", + "publisher" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "Test", + "email": "Test@test.com", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + } + ], + "nextPageToken": "", + "totalSize": 7 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/members", WrapMockAuthConfig(ProjectListUsers, cfgKafka, &brk, str, &mgr, nil, "project_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *ProjectsHandlersTestSuite) TestProjectUserCreate() { + + type td struct { + user string + postBody string + expectedResponse string + expectedStatusCode int + msg string + } + + testData := []td{ + { + user: "member-user", + postBody: `{ + "email": "test@example.com", + "service_roles": ["service_admin"], + "projects": [ + { + "project": "ARGO", + "roles": ["project_admin", "publisher", "consumer"] + }, + { + "project": "unknown" + } + ] + }`, + expectedResponse: `{ + "uuid": "{{UUID}}", + "projects": [ + { + "project": "ARGO", + "roles": [ + "project_admin", + 
"publisher", + "consumer" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "member-user", + "token": "{{TOKEN}}", + "email": "test@example.com", + "service_roles": [], + "created_on": "{{CON}}", + "modified_on": "{{MON}}", + "created_by": "UserA" +}`, + expectedStatusCode: 200, + msg: "Create a member of a project(ignore other projects & service roles)", + }, + { + user: "member-user-2", + postBody: `{ + "email": "test@example.com", + "service_roles": ["service_admin"], + "projects": [] + }`, + expectedResponse: `{ + "uuid": "{{UUID}}", + "projects": [ + { + "project": "ARGO", + "roles": [], + "topics": [], + "subscriptions": [] + } + ], + "name": "member-user-2", + "token": "{{TOKEN}}", + "email": "test@example.com", + "service_roles": [], + "created_on": "{{CON}}", + "modified_on": "{{MON}}", + "created_by": "UserA" +}`, + expectedStatusCode: 200, + msg: "Create a member/user that automatically gets assigned to the respective project", + }, + { + user: "member-user-unknown", + postBody: `{ + "email": "test@example.com", + "service_roles": ["service_admin"], + "projects": [ + { + "project": "ARGO", + "roles": ["unknown"] + }, + { + "project": "unknown" + } + ] + }`, + expectedResponse: `{ + "error": { + "code": 400, + "message": "invalid role: unknown", + "status": "INVALID_ARGUMENT" + } +}`, + expectedStatusCode: 400, + msg: "Invalid user role", + }, + { + user: "member-user", + postBody: `{ + "email": "test@example.com", + "service_roles": ["service_admin"], + "projects": [ + { + "project": "ARGO", + "roles": ["unknown"] + }, + { + "project": "unknown" + } + ] + }`, + expectedResponse: `{ + "error": { + "code": 409, + "message": "User already exists", + "status": "ALREADY_EXISTS" + } +}`, + expectedStatusCode: 409, + msg: "user already exists", + }, + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + cfgKafka.ResAuth = false + brk := 
brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + + for _, t := range testData { + + w := httptest.NewRecorder() + url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/members/%v", t.user) + req, err := http.NewRequest("POST", url, strings.NewReader(t.postBody)) + if err != nil { + log.Fatal(err) + } + router.HandleFunc("/v1/projects/{project}/members/{user}", WrapMockAuthConfig(ProjectUserCreate, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + if t.expectedStatusCode == 200 { + u, _ := auth.FindUsers("argo_uuid", "", t.user, true, str) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{UUID}}", u.List[0].UUID, 1) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{TOKEN}}", u.List[0].Token, 1) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{CON}}", u.List[0].CreatedOn, 1) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{MON}}", u.List[0].ModifiedOn, 1) + } + suite.Equal(t.expectedStatusCode, w.Code, t.msg) + suite.Equal(t.expectedResponse, w.Body.String(), t.msg) + } + +} + +func (suite *ProjectsHandlersTestSuite) TestProjectUserUpdate() { + + type td struct { + user string + postBody string + authRole string + expectedResponse string + expectedStatusCode int + msg string + } + + testData := []td{ + { + user: "UserA", + postBody: `{ + "email": "test@example.com", + "name": "new-name", + "service_roles": ["service_admin"], + "projects": [ + { + "project": "ARGO", + "roles": ["project_admin", "publisher"] + }, + { + "project": "unknown" + } + ] + }`, + authRole: "project_admin", + expectedResponse: `{ + "uuid": "{{UUID}}", + "projects": [ + { + "project": "ARGO", + "roles": [ + "project_admin", + "publisher" + ], + "topics": [ + "topic1", + "topic2" + ], + "subscriptions": [ + "sub1", + "sub2", + "sub3" + ] + } + ], + "name": "UserA", + "first_name": "FirstA", + "last_name": 
"LastA", + "organization": "OrgA", + "description": "DescA", + "email": "foo-email", + "service_roles": [], + "created_on": "{{CON}}", + "modified_on": "{{MON}}" +}`, + expectedStatusCode: 200, + msg: "Update a member of a project(ignore other projects & service roles & email & name)(project_admin)", + }, + { + user: "UserA", + postBody: `{ + "email": "test@example.com", + "name": "new-name", + "service_roles": ["service_admin"], + "projects": [ + { + "project": "ARGO", + "roles": ["project_admin", "publisher"] + }, + { + "project": "unknown" + } + ] + }`, + authRole: "service_admin", + expectedResponse: `{ + "uuid": "{{UUID}}", + "projects": [ + { + "project": "ARGO", + "roles": [ + "project_admin", + "publisher" + ], + "topics": [ + "topic1", + "topic2" + ], + "subscriptions": [ + "sub1", + "sub2", + "sub3" + ] + } + ], + "name": "UserA", + "first_name": "FirstA", + "last_name": "LastA", + "organization": "OrgA", + "description": "DescA", + "token": "{{TOKEN}}", + "email": "foo-email", + "service_roles": [], + "created_on": "{{CON}}", + "modified_on": "{{MON}}" +}`, + expectedStatusCode: 200, + msg: "Update a member of a project(ignore other projects & service roles & email & name)(service_admin)", + }, + { + user: "UserA", + postBody: `{ + "email": "test@example.com", + "service_roles": ["service_admin"], + "projects": [ + { + "project": "ARGO", + "roles": ["unknown"] + } + ] + }`, + authRole: "project_admin", + expectedResponse: `{ + "error": { + "code": 400, + "message": "invalid role: unknown", + "status": "INVALID_ARGUMENT" + } +}`, + expectedStatusCode: 400, + msg: "Invalid user role", + }, + { + user: "UserA", + postBody: `{ + "email": "test@example.com", + "service_roles": ["service_admin"], + "projects": [ + { + "project": "ARGO2", + "roles": ["publisher"] + } + ] + }`, + authRole: "project_admin", + expectedResponse: `{ + "error": { + "code": 403, + "message": "Access to this resource is forbidden. 
User is not a member of the project", + "status": "FORBIDDEN" + } +}`, + expectedStatusCode: 403, + msg: "user is not a member of the project", + }, + { + user: "unknown", + postBody: `{ + "email": "test@example.com", + "service_roles": ["service_admin"], + "projects": [ + { + "project": "ARGO", + "roles": ["publisher"] + } + ] + }`, + authRole: "project_admin", + expectedResponse: `{ + "error": { + "code": 404, + "message": "User doesn't exist", + "status": "NOT_FOUND" + } +}`, + expectedStatusCode: 404, + msg: "user doesn't exist" + + "", + }, + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + cfgKafka.ResAuth = true + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + mgr := oldPush.Manager{} + pc := new(push.MockClient) + + for _, t := range testData { + + w := httptest.NewRecorder() + url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/members/%v", t.user) + req, err := http.NewRequest("PUT", url, strings.NewReader(t.postBody)) + if err != nil { + log.Fatal(err) + } + router := mux.NewRouter().StrictSlash(true) + router.HandleFunc("/v1/projects/{project}/members/{user}", WrapMockAuthConfig(ProjectUserUpdate, cfgKafka, &brk, str, &mgr, pc, t.authRole)) + router.ServeHTTP(w, req) + if t.expectedStatusCode == 200 { + u, _ := auth.FindUsers("argo_uuid", "", t.user, true, str) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{UUID}}", u.List[0].UUID, 1) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{TOKEN}}", u.List[0].Token, 1) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{CON}}", u.List[0].CreatedOn, 1) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{MON}}", u.List[0].ModifiedOn, 1) + } + suite.Equal(t.expectedStatusCode, w.Code, t.msg) + suite.Equal(t.expectedResponse, w.Body.String(), t.msg) + } + +} + +func (suite *ProjectsHandlersTestSuite) TestProjectUserRemove() { + 
+ type td struct { + user string + expectedResponse string + expectedStatusCode int + msg string + } + + testData := []td{ + { + user: "UserA", + expectedResponse: `{}`, + expectedStatusCode: 200, + msg: "Remove a member from the project", + }, + { + user: "UserA", + expectedResponse: `{ + "error": { + "code": 403, + "message": "Access to this resource is forbidden. User is not a member of the project", + "status": "FORBIDDEN" + } +}`, + expectedStatusCode: 403, + msg: "user is not a member of the project", + }, + { + user: "unknown", + expectedResponse: `{ + "error": { + "code": 404, + "message": "User doesn't exist", + "status": "NOT_FOUND" + } +}`, + expectedStatusCode: 404, + msg: "user doesn't exist" + + "", + }, + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + cfgKafka.ResAuth = true + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + + for _, t := range testData { + + w := httptest.NewRecorder() + url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/members/%v:remove", t.user) + req, err := http.NewRequest("POST", url, nil) + if err != nil { + log.Fatal(err) + } + router.HandleFunc("/v1/projects/{project}/members/{user}:remove", WrapMockAuthConfig(ProjectUserRemove, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(t.expectedStatusCode, w.Code, t.msg) + suite.Equal(t.expectedResponse, w.Body.String(), t.msg) + } +} + +func (suite *ProjectsHandlersTestSuite) TestProjectUserAdd() { + + type td struct { + user string + project string + authRole string + postBody string + expectedResponse string + expectedStatusCode int + msg string + } + + testData := []td{ + { + user: "UserA", + project: "ARGO2", + postBody: `{ + "roles": ["unknown"] + }`, + expectedResponse: `{ + "error": { + "code": 400, + 
"message": "invalid role: unknown", + "status": "INVALID_ARGUMENT" + } +}`, + expectedStatusCode: 400, + msg: "Invalid user role", + }, + { + user: "UserA", + project: "ARGO2", + postBody: `{ + "roles": ["project_admin", "publisher", "consumer"] + }`, + authRole: "project_admin", + expectedResponse: `{ + "uuid": "{{UUID}}", + "projects": [ + { + "project": "ARGO2", + "roles": [ + "project_admin", + "publisher", + "consumer" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "UserA", + "first_name": "FirstA", + "last_name": "LastA", + "organization": "OrgA", + "description": "DescA", + "email": "foo-email", + "service_roles": [], + "created_on": "{{CON}}", + "modified_on": "{{MON}}" +}`, + expectedStatusCode: 200, + msg: "Add user to project(project_admin)", + }, + { + user: "UserA", + project: "ARGO2", + postBody: `{ + "roles": ["project_admin", "consumer", "publisher"] + }`, + authRole: "service_admin", + expectedResponse: `{ + "uuid": "{{UUID}}", + "projects": [ + { + "project": "ARGO", + "roles": [ + "consumer", + "publisher" + ], + "topics": [ + "topic1", + "topic2" + ], + "subscriptions": [ + "sub1", + "sub2", + "sub3" + ] + }, + { + "project": "ARGO2", + "roles": [ + "project_admin", + "consumer", + "publisher" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "UserA", + "first_name": "FirstA", + "last_name": "LastA", + "organization": "OrgA", + "description": "DescA", + "token": "{{TOKEN}}", + "email": "foo-email", + "service_roles": [], + "created_on": "{{CON}}", + "modified_on": "{{MON}}" +}`, + expectedStatusCode: 200, + msg: "Add user to project(service_admin)", + }, + { + user: "UserA", + project: "ARGO", + postBody: `{ + "roles": ["project_admin"] + }`, + expectedResponse: `{ + "error": { + "code": 409, + "message": "User is already a member of the project", + "status": "CONFLICT" + } +}`, + expectedStatusCode: 409, + msg: "user already member of the project", + }, + } + + cfgKafka := config.NewAPICfg() + 
cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + cfgKafka.ResAuth = false + brk := brokers.MockBroker{} + mgr := oldPush.Manager{} + pc := new(push.MockClient) + + for _, t := range testData { + str := stores.NewMockStore("whatever", "argo_mgs") + w := httptest.NewRecorder() + url := fmt.Sprintf("http://localhost:8080/v1/projects/%v/members/%v:add", t.project, t.user) + req, err := http.NewRequest("POST", url, strings.NewReader(t.postBody)) + if err != nil { + log.Fatal(err) + } + router := mux.NewRouter().StrictSlash(true) + router.HandleFunc("/v1/projects/{project}/members/{user}:add", WrapMockAuthConfig(ProjectUserAdd, cfgKafka, &brk, str, &mgr, pc, t.authRole)) + router.ServeHTTP(w, req) + if t.expectedStatusCode == 200 { + u, _ := auth.FindUsers("argo_uuid", "", t.user, true, str) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{UUID}}", u.List[0].UUID, 1) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{TOKEN}}", u.List[0].Token, 1) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{CON}}", u.List[0].CreatedOn, 1) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{MON}}", u.List[0].ModifiedOn, 1) + } + suite.Equal(t.expectedStatusCode, w.Code, t.msg) + suite.Equal(t.expectedResponse, w.Body.String(), t.msg) + } + +} + +func (suite *ProjectsHandlersTestSuite) TestProjectDelete() { + + req, err := http.NewRequest("DELETE", "http://localhost:8080/v1/projects/ARGO", nil) + + if err != nil { + log.Fatal(err) + } + + expResp := "" + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}", WrapMockAuthConfig(ProjectDelete, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + 
suite.Equal(expResp, w.Body.String()) +} + +func (suite *ProjectsHandlersTestSuite) TestProjectUpdate() { + + postJSON := `{ + "name":"NEWARGO", + "description":"time to change the description mates and the name" +}` + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}", WrapMockAuthConfig(ProjectUpdate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + projOut, _ := projects.GetFromJSON([]byte(w.Body.String())) + suite.Equal("NEWARGO", projOut.Name) + // Check if the mock authenticated userA has been marked as the creator + suite.Equal("UserA", projOut.CreatedBy) + suite.Equal("time to change the description mates and the name", projOut.Description) +} + +func (suite *ProjectsHandlersTestSuite) TestProjectCreate() { + + postJSON := `{ + "description":"This is a newly created project" +}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGONEW", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}", WrapMockAuthConfig(ProjectCreate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + projOut, _ := projects.GetFromJSON([]byte(w.Body.String())) + suite.Equal("ARGONEW", projOut.Name) + // Check if the mock authenticated userA has been marked as the 
creator + suite.Equal("UserA", projOut.CreatedBy) + suite.Equal("This is a newly created project", projOut.Description) +} + +func (suite *ProjectsHandlersTestSuite) TestProjectListAll() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "projects": [ + { + "name": "ARGO", + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA", + "description": "simple project" + }, + { + "name": "ARGO2", + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA", + "description": "simple project" + } + ] +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + + router.HandleFunc("/v1/projects", WrapMockAuthConfig(ProjectListAll, cfgKafka, &brk, str, &mgr, nil)) + + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *ProjectsHandlersTestSuite) TestProjectListOneNotFound() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGONAUFTS", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 404, + "message": "ProjectUUID doesn't exist", + "status": "NOT_FOUND" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}", WrapMockAuthConfig(ProjectListOne, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(404, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *ProjectsHandlersTestSuite) 
TestProjectListOne() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "name": "ARGO", + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA", + "description": "simple project" +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}", WrapMockAuthConfig(ProjectListOne, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func TestProjectsHandlersTestSuite(t *testing.T) { + log.SetOutput(ioutil.Discard) + suite.Run(t, new(ProjectsHandlersTestSuite)) +} diff --git a/handlers/registrations.go b/handlers/registrations.go new file mode 100644 index 00000000..4f41f902 --- /dev/null +++ b/handlers/registrations.go @@ -0,0 +1,260 @@ +package handlers + +import ( + "encoding/json" + "fmt" + "github.com/ARGOeu/argo-messaging/auth" + "github.com/ARGOeu/argo-messaging/stores" + gorillaContext "github.com/gorilla/context" + "github.com/gorilla/mux" + log "github.com/sirupsen/logrus" + "github.com/twinj/uuid" + "io/ioutil" + "net/http" + "time" +) + +// RegisterUser(POST) registers a new user +func RegisterUser(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Read POST JSON body + body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := 
APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // Parse pull options + requestBody := auth.UserRegistration{} + err = json.Unmarshal(body, &requestBody) + if err != nil { + err := APIErrorInvalidArgument("User") + respondErr(w, err) + return + } + + // check if a user with that name already exists + if auth.ExistsWithName(requestBody.Name, refStr) { + err := APIErrorConflict("User") + respondErr(w, err) + return + } + + uuid := uuid.NewV4().String() + registered := time.Now().UTC().Format("2006-01-02T15:04:05Z") + tkn, err := auth.GenToken() + if err != nil { + err := APIErrGenericInternal("") + respondErr(w, err) + return + } + + ur, err := auth.RegisterUser(uuid, requestBody.Name, requestBody.FirstName, requestBody.LastName, requestBody.Email, + requestBody.Organization, requestBody.Description, registered, tkn, auth.PendingRegistrationStatus, refStr) + + if err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + output, err = json.MarshalIndent(ur, "", " ") + if err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + respondOK(w, output) +} + +// AcceptUserRegister (POST) accepts a user registration and creates the respective user +func AcceptRegisterUser(w http.ResponseWriter, r *http.Request) { + + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + regUUID := urlVars["uuid"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) + + ru, err := auth.FindUserRegistration(regUUID, auth.PendingRegistrationStatus, refStr) + if err != nil { + + if err.Error() == "not found" { + err := APIErrorNotFound("User registration") + respondErr(w, err) + return + } + + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + 
return + } + + userUUID := uuid.NewV4().String() // generate a new userUUID to attach to the new project + token, err := auth.GenToken() // generate a new user token + created := time.Now().UTC() + // Get Result Object + res, err := auth.CreateUser(userUUID, ru.Name, ru.FirstName, ru.LastName, ru.Organization, ru.Description, + []auth.ProjectRoles{}, token, ru.Email, []string{}, created, refUserUUID, refStr) + + if err != nil { + if err.Error() == "exists" { + err := APIErrorConflict("User") + respondErr(w, err) + return + } + + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // update the registration + err = auth.UpdateUserRegistration(regUUID, auth.AcceptedRegistrationStatus, refUserUUID, created, refStr) + if err != nil { + log.Errorf("Could not update registration, %v", err.Error()) + } + + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + respondOK(w, []byte(resJSON)) +} + +func DeclineRegisterUser(w http.ResponseWriter, r *http.Request) { + + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + regUUID := urlVars["uuid"] + refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + _, err := auth.FindUserRegistration(regUUID, auth.PendingRegistrationStatus, refStr) + if err != nil { + + if err.Error() == "not found" { + err := APIErrorNotFound("User registration") + respondErr(w, err) + return + } + + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + err = auth.UpdateUserRegistration(regUUID, auth.DeclinedRegistrationStatus, refUserUUID, time.Now().UTC(), refStr) + if err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + 
return + } + + respondOK(w, []byte("{}")) + +} + +// ListOneRegistration(GET) retrieves information for a specific registration based on the provided activation token +func ListOneRegistration(w http.ResponseWriter, r *http.Request) { + + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + regUUID := urlVars["uuid"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + ur, err := auth.FindUserRegistration(regUUID, "", refStr) + if err != nil { + + if err.Error() == "not found" { + err := APIErrorNotFound("User registration") + respondErr(w, err) + return + } + + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + urb, err := json.MarshalIndent(ur, "", " ") + if err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + respondOK(w, urb) +} + +// ListAllRegistrations(GET) retrieves information about all the registrations in the service +func ListAllRegistrations(w http.ResponseWriter, r *http.Request) { + + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + name := r.URL.Query().Get("name") + status := r.URL.Query().Get("status") + email := r.URL.Query().Get("email") + org := r.URL.Query().Get("organization") + activationToken := r.URL.Query().Get("activation_token") + + ur, err := auth.FindUserRegistrations(status, activationToken, name, email, org, refStr) + if err != nil { + + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + urb, err := json.MarshalIndent(ur, "", " ") + if err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + respondOK(w, urb) +} diff --git 
a/handlers/registrations_test.go b/handlers/registrations_test.go new file mode 100644 index 00000000..75984c68 --- /dev/null +++ b/handlers/registrations_test.go @@ -0,0 +1,422 @@ +package handlers + +import ( + "fmt" + "github.com/ARGOeu/argo-messaging/auth" + "github.com/ARGOeu/argo-messaging/brokers" + "github.com/ARGOeu/argo-messaging/config" + oldPush "github.com/ARGOeu/argo-messaging/push" + push "github.com/ARGOeu/argo-messaging/push/grpc/client" + "github.com/ARGOeu/argo-messaging/stores" + "github.com/gorilla/mux" + log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/suite" + "io/ioutil" + "net/http" + "net/http/httptest" + "strings" + "testing" +) + +type RegistrationsHandlersTestSuite struct { + suite.Suite + cfgStr string +} + +func (suite *RegistrationsHandlersTestSuite) SetupTest() { + suite.cfgStr = `{ + "bind_ip":"", + "port":8080, + "zookeeper_hosts":["localhost"], + "kafka_znode":"", + "store_host":"localhost", + "store_db":"argo_msg", + "certificate":"/etc/pki/tls/certs/localhost.crt", + "certificate_key":"/etc/pki/tls/private/localhost.key", + "per_resource_auth":"true", + "push_enabled": "true", + "push_worker_token": "push_token" + }` +} + +func (suite *RegistrationsHandlersTestSuite) TestRegisterUser() { + + type td struct { + postBody string + expectedResponse string + expectedStatusCode int + msg string + } + + testData := []td{ + { + postBody: `{ + "name": "new-register-user", + "first_name": "first-name", + "last_name": "last-name", + "email": "test@example.com", + "organization": "org1", + "description": "desc1" + }`, + expectedResponse: `{ + "uuid": "{{UUID}}", + "name": "new-register-user", + "first_name": "first-name", + "last_name": "last-name", + "organization": "org1", + "description": "desc1", + "email": "test@example.com", + "status": "pending", + "activation_token": "{{ATKN}}", + "registered_at": "{{REAT}}" +}`, + expectedStatusCode: 200, + msg: "User registration successful", + }, + { + postBody: `{ + "name": 
"UserA", + "first_name": "new-name", + "last_name": "last-name", + "email": "test@example.com", + "organization": "org1", + "description": "desc1" + }`, + expectedResponse: `{ + "error": { + "code": 409, + "message": "User already exists", + "status": "ALREADY_EXISTS" + } +}`, + expectedStatusCode: 409, + msg: "user already exists", + }, + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + cfgKafka.ResAuth = false + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + + for _, t := range testData { + + w := httptest.NewRecorder() + req, err := http.NewRequest("POST", "http://localhost:8080/v1/registrations", strings.NewReader(t.postBody)) + if err != nil { + log.Fatal(err) + } + router.HandleFunc("/v1/registrations", WrapMockAuthConfig(RegisterUser, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + if t.expectedStatusCode == 200 { + t.expectedResponse = strings.Replace(t.expectedResponse, "{{UUID}}", str.UserRegistrations[1].UUID, 1) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{REAT}}", str.UserRegistrations[1].RegisteredAt, 1) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{ATKN}}", str.UserRegistrations[1].ActivationToken, 1) + } + suite.Equal(t.expectedStatusCode, w.Code, t.msg) + suite.Equal(t.expectedResponse, w.Body.String(), t.msg) + } + +} + +func (suite *RegistrationsHandlersTestSuite) TestAcceptRegisterUser() { + + type td struct { + ruuid string + uname string + expectedResponse string + expectedStatusCode int + msg string + } + + testData := []td{{ + ruuid: "ur-uuid1", + uname: "urname", + expectedResponse: `{ + "uuid": "{{UUID}}", + "name": "urname", + "first_name": "urfname", + "last_name": "urlname", + "organization": "urorg", + "description": "urdesc", + "token": "{{TOKEN}}", + 
"email": "uremail", + "service_roles": [], + "created_on": "{{CON}}", + "modified_on": "{{MON}}", + "created_by": "UserA" +}`, + expectedStatusCode: 200, + msg: "Successfully accepted a user's registration", + }, + { + ruuid: "ur-uuid1", + uname: "urname", + expectedResponse: `{ + "error": { + "code": 404, + "message": "User registration doesn't exist", + "status": "NOT_FOUND" + } +}`, + expectedStatusCode: 404, + msg: "User registration doesn't exist", + }} + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + cfgKafka.ResAuth = false + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + + for _, t := range testData { + + w := httptest.NewRecorder() + url := fmt.Sprintf("http://localhost:8080/v1/registrations/%v:accept", t.ruuid) + req, err := http.NewRequest("POST", url, nil) + if err != nil { + log.Fatal(err) + } + router.HandleFunc("/v1/registrations/{uuid}:accept", WrapMockAuthConfig(AcceptRegisterUser, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + if t.expectedStatusCode == 200 { + u, _ := auth.FindUsers("", "", t.uname, true, str) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{UUID}}", u.List[0].UUID, 1) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{TOKEN}}", u.List[0].Token, 1) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{CON}}", u.List[0].CreatedOn, 1) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{MON}}", u.List[0].ModifiedOn, 1) + } + suite.Equal(t.expectedStatusCode, w.Code, t.msg) + suite.Equal(t.expectedResponse, w.Body.String(), t.msg) + } + +} + +func (suite *RegistrationsHandlersTestSuite) TestDeclineRegisterUser() { + + type td struct { + regUUID string + expectedResponse string + expectedStatusCode int + msg string + } + + testData := []td{{ + 
regUUID: "ur-uuid1", + expectedResponse: `{}`, + expectedStatusCode: 200, + msg: "Successfully declined a user's registration", + }, + { + regUUID: "unknown", + expectedResponse: `{ + "error": { + "code": 404, + "message": "User registration doesn't exist", + "status": "NOT_FOUND" + } +}`, + expectedStatusCode: 404, + msg: "User registration doesn't exist", + }} + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + cfgKafka.ResAuth = false + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + + for _, t := range testData { + + w := httptest.NewRecorder() + url := fmt.Sprintf("http://localhost:8080/v1/registrations/%v:decline", t.regUUID) + req, err := http.NewRequest("POST", url, nil) + if err != nil { + log.Fatal(err) + } + router.HandleFunc("/v1/registrations/{uuid}:decline", WrapMockAuthConfig(DeclineRegisterUser, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + if t.expectedStatusCode == 200 { + suite.Equal(auth.DeclinedRegistrationStatus, str.UserRegistrations[0].Status) + } + suite.Equal(t.expectedStatusCode, w.Code, t.msg) + suite.Equal(t.expectedResponse, w.Body.String(), t.msg) + } + +} + +func (suite *RegistrationsHandlersTestSuite) TestListOneRegistration() { + + type td struct { + regUUID string + expectedResponse string + expectedStatusCode int + msg string + } + + testData := []td{ + { + regUUID: "ur-uuid1", + expectedResponse: `{ + "uuid": "ur-uuid1", + "name": "urname", + "first_name": "urfname", + "last_name": "urlname", + "organization": "urorg", + "description": "urdesc", + "email": "uremail", + "status": "pending", + "activation_token": "uratkn-1", + "registered_at": "2019-05-12T22:26:58Z", + "modified_by": "UserA", + "modified_at": "2020-05-15T22:26:58Z" +}`, + expectedStatusCode: 200, + msg: "User registration 
retrieved successfully", + }, + { + regUUID: "unknown", + expectedResponse: `{ + "error": { + "code": 404, + "message": "User registration doesn't exist", + "status": "NOT_FOUND" + } +}`, + expectedStatusCode: 404, + msg: "User registration doesn't exist", + }, + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + cfgKafka.ResAuth = false + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + + for _, t := range testData { + + w := httptest.NewRecorder() + url := fmt.Sprintf("http://localhost:8080/v1/registrations/%v", t.regUUID) + req, err := http.NewRequest("GET", url, nil) + if err != nil { + log.Fatal(err) + } + router.HandleFunc("/v1/registrations/{uuid}", WrapMockAuthConfig(ListOneRegistration, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(t.expectedStatusCode, w.Code, t.msg) + suite.Equal(t.expectedResponse, w.Body.String(), t.msg) + } + +} + +func (suite *RegistrationsHandlersTestSuite) TestListManyRegistrations() { + + type td struct { + urlPath string + expectedResponse string + expectedStatusCode int + msg string + } + + testData := []td{ + { + urlPath: "registrations", + expectedResponse: `{ + "user_registrations": [ + { + "uuid": "ur-uuid1", + "name": "urname", + "first_name": "urfname", + "last_name": "urlname", + "organization": "urorg", + "description": "urdesc", + "email": "uremail", + "status": "pending", + "activation_token": "uratkn-1", + "registered_at": "2019-05-12T22:26:58Z", + "modified_by": "UserA", + "modified_at": "2020-05-15T22:26:58Z" + } + ] +}`, + expectedStatusCode: 200, + msg: "Retrieve all available user registrations without any filters", + }, + { + urlPath: "registrations?status=pending&name=urname&activation_token=uratkn-1&email=uremail&organization=urorg", + expectedResponse: 
`{ + "user_registrations": [ + { + "uuid": "ur-uuid1", + "name": "urname", + "first_name": "urfname", + "last_name": "urlname", + "organization": "urorg", + "description": "urdesc", + "email": "uremail", + "status": "pending", + "activation_token": "uratkn-1", + "registered_at": "2019-05-12T22:26:58Z", + "modified_by": "UserA", + "modified_at": "2020-05-15T22:26:58Z" + } + ] +}`, + expectedStatusCode: 200, + msg: "Retrieve all available user registrations with filters", + }, + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + cfgKafka.ResAuth = false + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + + for _, t := range testData { + + w := httptest.NewRecorder() + url := fmt.Sprintf("http://localhost:8080/v1/%v", t.urlPath) + req, err := http.NewRequest("GET", url, nil) + if err != nil { + log.Fatal(err) + } + router.HandleFunc("/v1/registrations", WrapMockAuthConfig(ListAllRegistrations, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(t.expectedStatusCode, w.Code, t.msg) + suite.Equal(t.expectedResponse, w.Body.String(), t.msg) + } + +} + +func TestRegistrationsHandlersTestSuite(t *testing.T) { + log.SetOutput(ioutil.Discard) + suite.Run(t, new(RegistrationsHandlersTestSuite)) +} diff --git a/handlers/schemas.go b/handlers/schemas.go new file mode 100644 index 00000000..72ce5c4f --- /dev/null +++ b/handlers/schemas.go @@ -0,0 +1,334 @@ +package handlers + +import ( + "bytes" + "encoding/base64" + "encoding/json" + "fmt" + "github.com/ARGOeu/argo-messaging/messages" + "github.com/ARGOeu/argo-messaging/schemas" + "github.com/ARGOeu/argo-messaging/stores" + gorillaContext "github.com/gorilla/context" + "github.com/gorilla/mux" + "github.com/twinj/uuid" + "net/http" +) + +// SchemaCreate(POST) handles the creation 
of a new schema +func SchemaCreate(w http.ResponseWriter, r *http.Request) { + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Get url path variables + urlVars := mux.Vars(r) + schemaName := urlVars["schema"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + schemaUUID := uuid.NewV4().String() + + schema := schemas.Schema{} + + err := json.NewDecoder(r.Body).Decode(&schema) + if err != nil { + err := APIErrorInvalidArgument("Schema") + respondErr(w, err) + return + } + + schema, err = schemas.Create(projectUUID, schemaUUID, schemaName, schema.Type, schema.RawSchema, refStr) + if err != nil { + if err.Error() == "exists" { + err := APIErrorConflict("Schema") + respondErr(w, err) + return + + } + + if err.Error() == "unsupported" { + err := APIErrorInvalidData(schemas.UnsupportedSchemaError) + respondErr(w, err) + return + + } + + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + + output, _ := json.MarshalIndent(schema, "", " ") + respondOK(w, output) +} + +// SchemaListOne(GET) retrieves information about the requested schema +func SchemaListOne(w http.ResponseWriter, r *http.Request) { + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Get url path variables + urlVars := mux.Vars(r) + schemaName := urlVars["schema"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + schemasList, err := schemas.Find(projectUUID, "", schemaName, refStr) + if 
err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + if schemasList.Empty() { + err := APIErrorNotFound("Schema") + respondErr(w, err) + return + } + + output, _ := json.MarshalIndent(schemasList.Schemas[0], "", " ") + respondOK(w, output) +} + +// SchemaLisAll(GET) retrieves all the schemas under the given project +func SchemaListAll(w http.ResponseWriter, r *http.Request) { + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + schemasList, err := schemas.Find(projectUUID, "", "", refStr) + if err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + output, _ := json.MarshalIndent(schemasList, "", " ") + respondOK(w, output) +} + +// SchemaUpdate(PUT) updates the given schema +func SchemaUpdate(w http.ResponseWriter, r *http.Request) { + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Get url path variables + urlVars := mux.Vars(r) + schemaName := urlVars["schema"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + schemasList, err := schemas.Find(projectUUID, "", schemaName, refStr) + if err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + if schemasList.Empty() { + err := APIErrorNotFound("Schema") + respondErr(w, err) + return + } + + updatedSchema := schemas.Schema{} + + err = 
json.NewDecoder(r.Body).Decode(&updatedSchema) + if err != nil { + err := APIErrorInvalidArgument("Schema") + respondErr(w, err) + return + } + + if updatedSchema.FullName != "" { + _, schemaName, err := schemas.ExtractSchema(updatedSchema.FullName) + if err != nil { + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + updatedSchema.Name = schemaName + } + + schema, err := schemas.Update(schemasList.Schemas[0], updatedSchema.Name, updatedSchema.Type, updatedSchema.RawSchema, refStr) + if err != nil { + if err.Error() == "exists" { + err := APIErrorConflict("Schema") + respondErr(w, err) + return + + } + + if err.Error() == "unsupported" { + err := APIErrorInvalidData(schemas.UnsupportedSchemaError) + respondErr(w, err) + return + + } + + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + + output, _ := json.MarshalIndent(schema, "", " ") + respondOK(w, output) +} + +func SchemaDelete(w http.ResponseWriter, r *http.Request) { + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Get url path variables + urlVars := mux.Vars(r) + schemaName := urlVars["schema"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + schemasList, err := schemas.Find(projectUUID, "", schemaName, refStr) + if err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + if schemasList.Empty() { + err := APIErrorNotFound("Schema") + respondErr(w, err) + return + } + + err = schemas.Delete(schemasList.Schemas[0].UUID, refStr) + if err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + respondOK(w, nil) +} + +// SchemaValidateMessage(POST) validates the given message against the 
schema +func SchemaValidateMessage(w http.ResponseWriter, r *http.Request) { + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Get url path variables + urlVars := mux.Vars(r) + schemaName := urlVars["schema"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + schemasList, err := schemas.Find(projectUUID, "", schemaName, refStr) + if err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + if schemasList.Empty() { + err := APIErrorNotFound("Schema") + respondErr(w, err) + return + } + + buf := bytes.Buffer{} + _, err = buf.ReadFrom(r.Body) + if err != nil { + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + + msgList := messages.MsgList{} + + switch schemasList.Schemas[0].Type { + case schemas.JSON: + msg := messages.Message{ + Data: base64.StdEncoding.EncodeToString(buf.Bytes()), + } + + msgList.Msgs = append(msgList.Msgs, msg) + + case schemas.AVRO: + + body := map[string]string{} + err := json.Unmarshal(buf.Bytes(), &body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // check to find the payload field + if val, ok := body["data"]; ok { + + msg := messages.Message{ + Data: val, + } + + msgList.Msgs = append(msgList.Msgs, msg) + + } else { + + err := APIErrorInvalidArgument("Schema Payload") + respondErr(w, err) + return + } + } + + err = schemas.ValidateMessages(schemasList.Schemas[0], msgList) + if err != nil { + if err.Error() == "500" { + err := APIErrGenericInternal(schemas.GenericError) + respondErr(w, err) + return + } else { + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + } + + res, _ := 
json.MarshalIndent(map[string]string{"message": "Message validated successfully"}, "", " ") + + respondOK(w, res) +} diff --git a/handlers/schemas_test.go b/handlers/schemas_test.go new file mode 100644 index 00000000..2340dd1b --- /dev/null +++ b/handlers/schemas_test.go @@ -0,0 +1,773 @@ +package handlers + +import ( + "bytes" + "encoding/json" + "fmt" + "github.com/ARGOeu/argo-messaging/brokers" + "github.com/ARGOeu/argo-messaging/config" + oldPush "github.com/ARGOeu/argo-messaging/push" + push "github.com/ARGOeu/argo-messaging/push/grpc/client" + "github.com/ARGOeu/argo-messaging/schemas" + "github.com/ARGOeu/argo-messaging/stores" + "github.com/gorilla/mux" + log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/suite" + "io/ioutil" + "net/http" + "net/http/httptest" + "strings" + "testing" +) + +type SchemasHandlersTestSuite struct { + suite.Suite + cfgStr string +} + +func (suite *SchemasHandlersTestSuite) SetupTest() { + suite.cfgStr = `{ + "bind_ip":"", + "port":8080, + "zookeeper_hosts":["localhost"], + "kafka_znode":"", + "store_host":"localhost", + "store_db":"argo_msg", + "certificate":"/etc/pki/tls/certs/localhost.crt", + "certificate_key":"/etc/pki/tls/private/localhost.key", + "per_resource_auth":"true", + "push_enabled": "true", + "push_worker_token": "push_token" + }` +} + +func (suite *SchemasHandlersTestSuite) TestSchemaCreate() { + + type td struct { + postBody string + expectedResponse string + schemaName string + expectedStatusCode int + msg string + } + + testData := []td{ + { + postBody: `{ + "type": "json", + "schema":{ + "type": "string" + } +}`, + schemaName: "new-schema", + expectedStatusCode: 200, + expectedResponse: `{ + "uuid": "{{UUID}}", + "name": "projects/ARGO/schemas/new-schema", + "type": "json", + "schema": { + "type": "string" + } +}`, + msg: "Case where the schema is valid and successfully created(JSON)", + }, + { + postBody: `{ + "type": "avro", + "schema":{ + "type": "record", + "namespace": "user.avro", + 
"name":"User", + "fields": [ + {"name": "username", "type": "string"}, + {"name": "phone", "type": "int"} + ] + } +}`, + schemaName: "new-schema-avro", + expectedStatusCode: 200, + expectedResponse: `{ + "uuid": "{{UUID}}", + "name": "projects/ARGO/schemas/new-schema-avro", + "type": "avro", + "schema": { + "fields": [ + { + "name": "username", + "type": "string" + }, + { + "name": "phone", + "type": "int" + } + ], + "name": "User", + "namespace": "user.avro", + "type": "record" + } +}`, + msg: "Case where the schema is valid and successfully created(AVRO)", + }, + { + postBody: `{ + "type": "unknown", + "schema":{ + "type": "string" + } +}`, + schemaName: "new-schema-2", + expectedStatusCode: 400, + expectedResponse: `{ + "error": { + "code": 400, + "message": "Schema type can only be 'json' or 'avro'", + "status": "INVALID_ARGUMENT" + } +}`, + msg: "Case where the schema type is unsupported", + }, + { + postBody: `{ + "type": "json", + "schema":{ + "type": "unknown" + } +}`, + schemaName: "new-schema-2", + expectedStatusCode: 400, + expectedResponse: `{ + "error": { + "code": 400, + "message": "has a primitive type that is NOT VALID -- given: /unknown/ Expected valid values are:[array boolean integer number null object string]", + "status": "INVALID_ARGUMENT" + } +}`, + msg: "Case where the json schema is not valid", + }, + { + postBody: `{ + "type": "avro", + "schema":{ + "type": "unknown" + } +}`, + schemaName: "new-schema-2", + expectedStatusCode: 400, + expectedResponse: `{ + "error": { + "code": 400, + "message": "unknown type name: \"unknown\"", + "status": "INVALID_ARGUMENT" + } +}`, + msg: "Case where the avro schema is not valid", + }, + { + postBody: `{ + "type": "json", + "schema":{ + "type": "string" + } +}`, + schemaName: "schema-1", + expectedStatusCode: 409, + expectedResponse: `{ + "error": { + "code": 409, + "message": "Schema already exists", + "status": "ALREADY_EXISTS" + } +}`, + msg: "Case where the json schema name already exists", + }, + } 
+ + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + + for _, t := range testData { + + w := httptest.NewRecorder() + url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/schemas/%v", t.schemaName) + req, err := http.NewRequest("POST", url, strings.NewReader(t.postBody)) + if err != nil { + log.Fatal(err) + } + router.HandleFunc("/v1/projects/{project}/schemas/{schema}", WrapMockAuthConfig(SchemaCreate, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + + if t.expectedStatusCode == 200 { + s := schemas.Schema{} + json.Unmarshal(w.Body.Bytes(), &s) + t.expectedResponse = strings.Replace(t.expectedResponse, "{{UUID}}", s.UUID, 1) + } + + suite.Equal(t.expectedStatusCode, w.Code, t.msg) + suite.Equal(t.expectedResponse, w.Body.String(), t.msg) + } + +} + +func (suite *SchemasHandlersTestSuite) TestSchemaListOne() { + + type td struct { + expectedResponse string + schemaName string + expectedStatusCode int + msg string + } + + testData := []td{ + { + schemaName: "schema-1", + expectedStatusCode: 200, + expectedResponse: `{ + "uuid": "schema_uuid_1", + "name": "projects/ARGO/schemas/schema-1", + "type": "json", + "schema": { + "properties": { + "address": { + "type": "string" + }, + "email": { + "type": "string" + }, + "name": { + "type": "string" + }, + "telephone": { + "type": "string" + } + }, + "required": [ + "name", + "email" + ], + "type": "object" + } +}`, + msg: "Case where a specific schema is retrieved successfully", + }, + { + schemaName: "unknown", + expectedStatusCode: 404, + expectedResponse: `{ + "error": { + "code": 404, + "message": "Schema doesn't exist", + "status": "NOT_FOUND" + } +}`, + msg: "Case where the requested schema doesn't exist", + }, + } + + 
cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + + for _, t := range testData { + + w := httptest.NewRecorder() + url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/schemas/%v", t.schemaName) + req, err := http.NewRequest("GET", url, nil) + if err != nil { + log.Fatal(err) + } + router.HandleFunc("/v1/projects/{project}/schemas/{schema}", WrapMockAuthConfig(SchemaListOne, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + + suite.Equal(t.expectedStatusCode, w.Code, t.msg) + suite.Equal(t.expectedResponse, w.Body.String(), t.msg) + } +} + +func (suite *SchemasHandlersTestSuite) TestSchemaListAll() { + + type td struct { + expectedResponse string + projectName string + expectedStatusCode int + msg string + } + + testData := []td{ + { + projectName: "ARGO", + expectedStatusCode: 200, + expectedResponse: `{ + "schemas": [ + { + "uuid": "schema_uuid_1", + "name": "projects/ARGO/schemas/schema-1", + "type": "json", + "schema": { + "properties": { + "address": { + "type": "string" + }, + "email": { + "type": "string" + }, + "name": { + "type": "string" + }, + "telephone": { + "type": "string" + } + }, + "required": [ + "name", + "email" + ], + "type": "object" + } + }, + { + "uuid": "schema_uuid_2", + "name": "projects/ARGO/schemas/schema-2", + "type": "json", + "schema": { + "properties": { + "address": { + "type": "string" + }, + "email": { + "type": "string" + }, + "name": { + "type": "string" + }, + "telephone": { + "type": "string" + } + }, + "required": [ + "name", + "email" + ], + "type": "object" + } + }, + { + "uuid": "schema_uuid_3", + "name": "projects/ARGO/schemas/schema-3", + "type": "avro", + "schema": { + "fields": [ + { + "name": "username", + "type": "string" + }, 
+ { + "name": "phone", + "type": "int" + } + ], + "name": "User", + "namespace": "user.avro", + "type": "record" + } + } + ] +}`, + msg: "Case where the schemas under a project are successfully retrieved", + }, + { + projectName: "ARGO2", + expectedStatusCode: 200, + expectedResponse: `{ + "schemas": [] +}`, + msg: "Case where the given project has no schemas", + }, + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + + for _, t := range testData { + + w := httptest.NewRecorder() + url := fmt.Sprintf("http://localhost:8080/v1/projects/%s/schemas", t.projectName) + req, err := http.NewRequest("GET", url, nil) + if err != nil { + log.Fatal(err) + } + router.HandleFunc("/v1/projects/{project}/schemas", WrapMockAuthConfig(SchemaListAll, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + + suite.Equal(t.expectedStatusCode, w.Code, t.msg) + suite.Equal(t.expectedResponse, w.Body.String(), t.msg) + } +} + +func (suite *SchemasHandlersTestSuite) TestSchemaUpdate() { + + type td struct { + postBody string + expectedResponse string + schemaName string + expectedStatusCode int + msg string + } + + testData := []td{ + { + schemaName: "schema-2", + postBody: `{"name": "projects/ARGO/schemas/schema-1"}`, + expectedStatusCode: 409, + expectedResponse: `{ + "error": { + "code": 409, + "message": "Schema already exists", + "status": "ALREADY_EXISTS" + } +}`, + msg: "Case where the requested schema wants to update the name field to an already existing one", + }, + { + schemaName: "schema-1", + postBody: `{"type":"unsupported"}`, + expectedStatusCode: 400, + expectedResponse: `{ + "error": { + "code": 400, + "message": "Schema type can only be 'json' or 'avro'", + "status": "INVALID_ARGUMENT" + } 
+}`, + msg: "Case where the requested schema wants to update its type field to an unsupported option", + }, + { + schemaName: "schema-1", + postBody: `{"schema":{"type":"unknown"}}`, + expectedStatusCode: 400, + expectedResponse: `{ + "error": { + "code": 400, + "message": "has a primitive type that is NOT VALID -- given: /unknown/ Expected valid values are:[array boolean integer number null object string]", + "status": "INVALID_ARGUMENT" + } +}`, + msg: "Case where the requested schema wants to update its schema with invalid contents", + }, + { + schemaName: "schema-1", + expectedStatusCode: 200, + expectedResponse: `{ + "uuid": "schema_uuid_1", + "name": "projects/ARGO/schemas/new-name", + "type": "json", + "schema": { + "properties": { + "address": { + "type": "string" + }, + "email": { + "type": "string" + }, + "name": { + "type": "string" + }, + "telephone": { + "type": "string" + } + }, + "required": [ + "name", + "email", + "address" + ], + "type": "object" + } +}`, + postBody: `{ + "name": "projects/ARGO/schemas/new-name", + "type": "json", + "schema": { + "properties": { + "address": { + "type": "string" + }, + "email": { + "type": "string" + }, + "name": { + "type": "string" + }, + "telephone": { + "type": "string" + } + }, + "required": [ + "name", + "email", + "address" + ], + "type": "object" + } +}`, + + msg: "Case where a specific schema has all its fields updated successfully", + }, + { + schemaName: "unknown", + postBody: "", + expectedStatusCode: 404, + expectedResponse: `{ + "error": { + "code": 404, + "message": "Schema doesn't exist", + "status": "NOT_FOUND" + } +}`, + msg: "Case where the requested schema doesn't exist", + }, + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := 
new(push.MockClient) + + for _, t := range testData { + + w := httptest.NewRecorder() + url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/schemas/%v", t.schemaName) + req, err := http.NewRequest("PUT", url, strings.NewReader(t.postBody)) + if err != nil { + log.Fatal(err) + } + router.HandleFunc("/v1/projects/{project}/schemas/{schema}", WrapMockAuthConfig(SchemaUpdate, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + + suite.Equal(t.expectedStatusCode, w.Code, t.msg) + suite.Equal(t.expectedResponse, w.Body.String(), t.msg) + } +} + +func (suite *SchemasHandlersTestSuite) TestSchemaDelete() { + + type td struct { + expectedResponse string + schemaName string + expectedStatusCode int + msg string + } + + testData := []td{ + { + expectedResponse: "", + schemaName: "schema-1", + expectedStatusCode: 200, + msg: "Case where the schema is successfully deleted", + }, + { + schemaName: "unknown", + expectedStatusCode: 404, + expectedResponse: `{ + "error": { + "code": 404, + "message": "Schema doesn't exist", + "status": "NOT_FOUND" + } +}`, + msg: "Case where the requested schema doesn't exist", + }, + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + + for _, t := range testData { + + w := httptest.NewRecorder() + url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/schemas/%v", t.schemaName) + req, err := http.NewRequest("DELETE", url, nil) + if err != nil { + log.Fatal(err) + } + router.HandleFunc("/v1/projects/{project}/schemas/{schema}", WrapMockAuthConfig(SchemaDelete, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + + suite.Equal(t.expectedStatusCode, w.Code, t.msg) + suite.Equal(t.expectedResponse, w.Body.String(), t.msg) + } +} + +func (suite 
*SchemasHandlersTestSuite) TestSchemaValidateMessage() { + + type td struct { + expectedResponse string + postBody map[string]interface{} + schemaName string + expectedStatusCode int + msg string + } + + testData := []td{ + { + expectedResponse: `{ + "message": "Message validated successfully" +}`, + postBody: map[string]interface{}{ + "name": "name1", + "email": "email1", + }, + schemaName: "schema-1", + expectedStatusCode: 200, + msg: "Case where the message is successfully validated(JSON)", + }, + { + expectedResponse: `{ + "message": "Message validated successfully" +}`, + postBody: map[string]interface{}{ + "data": "DGFnZWxvc8T8Cg==", + }, + schemaName: "schema-3", + expectedStatusCode: 200, + msg: "Case where the message is successfully validated(AVRO)", + }, + { + postBody: map[string]interface{}{ + "name": "name1", + }, + schemaName: "schema-1", + expectedStatusCode: 400, + msg: "Case where the message is not valid(omit required email field)(JSON)", + expectedResponse: `{ + "error": { + "code": 400, + "message": "Message 0 data is not valid,(root): email is required", + "status": "INVALID_ARGUMENT" + } +}`, + }, + { + postBody: map[string]interface{}{ + "data": "T2JqAQQWYXZyby5zY2hlbWGYAnsidHlwZSI6InJlY29yZCIsIm5hbWUiOiJQbGFjZSIsIm5hbWVzcGFjZSI6InBsYWNlLmF2cm8iLCJmaWVsZHMiOlt7Im5hbWUiOiJwbGFjZW5hbWUiLCJ0eXBlIjoic3RyaW5nIn0seyJuYW1lIjoiYWRkcmVzcyIsInR5cGUiOiJzdHJpbmcifV19FGF2cm8uY29kZWMIbnVsbABM1P4b0GpYaCg9tqxa+YDZAiQSc3RyZWV0IDIyDnBsYWNlIGFM1P4b0GpYaCg9tqxa+YDZ", + }, + schemaName: "schema-3", + expectedStatusCode: 400, + msg: "Case where the message is not valid(AVRO)", + expectedResponse: `{ + "error": { + "code": 400, + "message": "Message 0 is not valid.cannot decode binary record \"user.avro.User\" field \"username\": cannot decode binary string: cannot decode binary bytes: negative size: -40", + "status": "INVALID_ARGUMENT" + } +}`, + }, + { + postBody: map[string]interface{}{ + "data": "DGFnZWxvc8T8Cg", + }, + schemaName: "schema-3", + 
expectedStatusCode: 400, + msg: "Case where the message is not in valid base64(AVRO)", + expectedResponse: `{ + "error": { + "code": 400, + "message": "Message 0 is not in valid base64 enocding,illegal base64 data at input byte 12", + "status": "INVALID_ARGUMENT" + } +}`, + }, + { + postBody: map[string]interface{}{ + "unknown": "unknown", + }, + schemaName: "schema-3", + expectedStatusCode: 400, + msg: "Case where the request arguments are missing the required data field(AVRO)", + expectedResponse: `{ + "error": { + "code": 400, + "message": "Invalid Schema Payload Arguments", + "status": "INVALID_ARGUMENT" + } +}`, + }, + { + schemaName: "unknown", + expectedStatusCode: 404, + expectedResponse: `{ + "error": { + "code": 404, + "message": "Schema doesn't exist", + "status": "NOT_FOUND" + } +}`, + msg: "Case where the schema doesn't exist", + }, + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + + for _, t := range testData { + + w := httptest.NewRecorder() + + url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/schemas/%v:validate", t.schemaName) + + body, _ := json.MarshalIndent(t.postBody, "", "") + + req, err := http.NewRequest("POST", url, bytes.NewReader(body)) + if err != nil { + log.Fatal(err) + } + router.HandleFunc("/v1/projects/{project}/schemas/{schema}:validate", WrapMockAuthConfig(SchemaValidateMessage, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + + suite.Equal(t.expectedStatusCode, w.Code, t.msg) + suite.Equal(t.expectedResponse, w.Body.String(), t.msg) + } +} + +func TestSchemasHandlersTestSuite(t *testing.T) { + log.SetOutput(ioutil.Discard) + suite.Run(t, new(SchemasHandlersTestSuite)) +} diff --git a/handlers/subscriptions.go 
b/handlers/subscriptions.go new file mode 100644 index 00000000..ee53a5b8 --- /dev/null +++ b/handlers/subscriptions.go @@ -0,0 +1,1411 @@ +package handlers + +import ( + "context" + "encoding/json" + "fmt" + "github.com/ARGOeu/argo-messaging/auth" + "github.com/ARGOeu/argo-messaging/brokers" + "github.com/ARGOeu/argo-messaging/messages" + "github.com/ARGOeu/argo-messaging/projects" + push "github.com/ARGOeu/argo-messaging/push/grpc/client" + "github.com/ARGOeu/argo-messaging/stores" + "github.com/ARGOeu/argo-messaging/subscriptions" + "github.com/ARGOeu/argo-messaging/topics" + "github.com/ARGOeu/argo-messaging/validation" + gorillaContext "github.com/gorilla/context" + "github.com/gorilla/mux" + log "github.com/sirupsen/logrus" + "io/ioutil" + "net/http" + "strconv" + "time" +) + +// SubAck (POST) acknowledge the consumption of specific messages +func SubAck(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + // Read POST JSON body + body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // Parse pull options + postBody, err := subscriptions.GetAckFromJSON(body) + if err != nil { + err := APIErrorInvalidData("Invalid ack parameter") + respondErr(w, err) + return + } + + // Get urlParams + projectName := urlVars["project"] + subName := urlVars["subscription"] + + // Check if sub exists + + cur_sub, err := subscriptions.Find(projectUUID, "", subName, "", 0, refStr) + if err != nil { + err := APIErrHandlingAcknowledgement() + respondErr(w, err) + return + } + if 
len(cur_sub.Subscriptions) == 0 { + err := APIErrorNotFound("Subscription") + respondErr(w, err) + return + } + + // Get list of AckIDs + if postBody.IDs == nil { + err := APIErrorInvalidData("Invalid ack id") + respondErr(w, err) + return + } + + // Check if each AckID is valid + for _, ackID := range postBody.IDs { + if validation.ValidAckID(projectName, subName, ackID) == false { + err := APIErrorInvalidData("Invalid ack id") + respondErr(w, err) + return + } + } + + // Get Max ackID + maxAckID, err := subscriptions.GetMaxAckID(postBody.IDs) + if err != nil { + err := APIErrHandlingAcknowledgement() + respondErr(w, err) + return + } + // Extract offset from max ackID + off, err := subscriptions.GetOffsetFromAckID(maxAckID) + + if err != nil { + err := APIErrorInvalidData("Invalid ack id") + respondErr(w, err) + return + } + + zSec := "2006-01-02T15:04:05Z" + t := time.Now().UTC() + ts := t.Format(zSec) + + err = refStr.UpdateSubOffsetAck(projectUUID, urlVars["subscription"], int64(off+1), ts) + if err != nil { + + if err.Error() == "ack timeout" { + err := APIErrorTimeout(err.Error()) + respondErr(w, err) + return + } + + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // Output result to JSON + resJSON := "{}" + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// SubListOne (GET) one subscription +func SubListOne(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + results, err := subscriptions.Find(projectUUID, "", urlVars["subscription"], "", 0, refStr) + + if err != nil { 
+ err := APIErrGenericBackend() + respondErr(w, err) + return + } + + // If not found + if results.Empty() { + err := APIErrorNotFound("Subscription") + respondErr(w, err) + return + } + + // if its a push enabled sub and it has a verified endpoint + // call the push server to find its real time push status + if results.Subscriptions[0].PushCfg != (subscriptions.PushConfig{}) { + if results.Subscriptions[0].PushCfg.Verified { + apsc := gorillaContext.Get(r, "apsc").(push.Client) + results.Subscriptions[0].PushStatus = apsc.SubscriptionStatus(context.TODO(), results.Subscriptions[0].FullName).Result(false) + } + } + + // Output result to JSON + resJSON, err := results.Subscriptions[0].ExportJSON() + + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// SubSetOffset (PUT) sets subscriptions current offset +func SubSetOffset(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + // Get Result Object + urlSub := urlVars["subscription"] + + // Read POST JSON body + body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // Parse pull options + postBody, err := subscriptions.GetSetOffsetJSON(body) + if err != nil { + err := APIErrorInvalidArgument("Offset") + respondErr(w, err) + return + } + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refBrk := gorillaContext.Get(r, "brk").(brokers.Broker) + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + // Find Subscription + results, err := subscriptions.Find(projectUUID, 
"", urlVars["subscription"], "", 0, refStr) + + if err != nil { + err := APIErrGenericBackend() + respondErr(w, err) + return + } + + // If not found + if results.Empty() { + err := APIErrorNotFound("Subscription") + respondErr(w, err) + return + } + brk_topic := projectUUID + "." + results.Subscriptions[0].Topic + min_offset := refBrk.GetMinOffset(brk_topic) + max_offset := refBrk.GetMaxOffset(brk_topic) + + //Check if given offset is between min max + if postBody.Offset < min_offset || postBody.Offset > max_offset { + err := APIErrorInvalidData("Offset out of bounds") + respondErr(w, err) + } + + // Get subscription offsets + refStr.UpdateSubOffset(projectUUID, urlSub, postBody.Offset) + + respondOK(w, output) +} + +// SubGetOffsets (GET) gets offset indices from a subscription +func SubGetOffsets(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refBrk := gorillaContext.Get(r, "brk").(brokers.Broker) + + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + results, err := subscriptions.Find(projectUUID, "", urlVars["subscription"], "", 0, refStr) + + if err != nil { + err := APIErrGenericBackend() + respondErr(w, err) + return + } + + // If not found + if results.Empty() { + err := APIErrorNotFound("Subscription") + respondErr(w, err) + return + } + + // Output result to JSON + brkTopic := projectUUID + "." 
+ results.Subscriptions[0].Topic + curOffset := results.Subscriptions[0].Offset + minOffset := refBrk.GetMinOffset(brkTopic) + maxOffset := refBrk.GetMaxOffset(brkTopic) + + // if the current subscription offset is behind the min available offset for the topic + // update it + if curOffset < minOffset { + refStr.UpdateSubOffset(projectUUID, urlVars["subscription"], minOffset) + curOffset = minOffset + } + + // Create offset struct + offResult := subscriptions.Offsets{ + Current: curOffset, + Min: minOffset, + Max: maxOffset, + } + + resJSON, err := offResult.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// SubTimeToOffset (GET) gets offset indices closest to a timestamp +func SubTimeToOffset(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refBrk := gorillaContext.Get(r, "brk").(brokers.Broker) + + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + results, err := subscriptions.Find(projectUUID, "", urlVars["subscription"], "", 0, refStr) + + if err != nil { + err := APIErrGenericBackend() + respondErr(w, err) + return + } + + // If not found + if results.Empty() { + err := APIErrorNotFound("Subscription") + respondErr(w, err) + return + } + + t, err := time.Parse("2006-01-02T15:04:05.000Z", r.URL.Query().Get("time")) + if err != nil { + err := APIErrorInvalidData("Time is not in valid Zulu format.") + respondErr(w, err) + return + } + + // Output result to JSON + brkTopic := projectUUID + "." 
+ results.Subscriptions[0].Topic + off, err := refBrk.TimeToOffset(brkTopic, t.Local()) + + if err != nil { + log.Errorf(err.Error()) + err := APIErrGenericBackend() + respondErr(w, err) + return + } + + if off < 0 { + err := APIErrorGenericConflict("Timestamp is out of bounds for the subscription's topic/partition") + respondErr(w, err) + return + } + + topicOffset := brokers.TopicOffset{Offset: off} + output, err = json.Marshal(topicOffset) + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + respondOK(w, output) +} + +// SubDelete (DEL) deletes an existing subscription +func SubDelete(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + // Get Result Object + results, err := subscriptions.Find(projectUUID, "", urlVars["subscription"], "", 0, refStr) + if err != nil { + err := APIErrGenericBackend() + respondErr(w, err) + return + } + + // If not found + if results.Empty() { + err := APIErrorNotFound("Subscription") + respondErr(w, err) + return + } + + err = subscriptions.RemoveSub(projectUUID, urlVars["subscription"], refStr) + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("Subscription") + respondErr(w, err) + return + } + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // if it is a push sub and it is also has a verified push endpoint, deactivate it + if results.Subscriptions[0].PushCfg != (subscriptions.PushConfig{}) { + if results.Subscriptions[0].PushCfg.Verified { + pr := make(map[string]string) + apsc := gorillaContext.Get(r, 
"apsc").(push.Client) + pr["message"] = apsc.DeactivateSubscription(context.TODO(), results.Subscriptions[0].FullName).Result(false) + b, _ := json.Marshal(pr) + output = b + } + } + respondOK(w, output) +} + +// SubModACL (POST) modifies the ACL +func SubModACL(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + // Get Result Object + urlSub := urlVars["subscription"] + + // Read POST JSON body + body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // Parse pull options + postBody, err := auth.GetACLFromJSON(body) + if err != nil { + err := APIErrorInvalidArgument("Subscription ACL") + respondErr(w, err) + log.Error(string(body[:])) + return + } + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + // check if user list contain valid users for the given project + _, err = auth.AreValidUsers(projectUUID, postBody.AuthUsers, refStr) + if err != nil { + err := APIErrorRoot{Body: APIErrorBody{Code: http.StatusNotFound, Message: err.Error(), Status: "NOT_FOUND"}} + respondErr(w, err) + return + } + + err = auth.ModACL(projectUUID, "subscriptions", urlSub, postBody.AuthUsers, refStr) + + if err != nil { + + if err.Error() == "not found" { + err := APIErrorNotFound("Subscription") + respondErr(w, err) + return + } + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + respondOK(w, output) +} + +// SubModPush (POST) modifies the push configuration +func SubModPush(w http.ResponseWriter, r *http.Request) { + + // Init output + output := 
[]byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + subName := urlVars["subscription"] + + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Read POST JSON body + body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // Parse pull options + postBody, err := subscriptions.GetFromJSON(body) + if err != nil { + err := APIErrorInvalidArgument("Subscription") + respondErr(w, err) + return + } + + // Get Result Object + res, err := subscriptions.Find(projectUUID, "", subName, "", 0, refStr) + + if err != nil { + err := APIErrGenericBackend() + respondErr(w, err) + return + } + + if res.Empty() { + err := APIErrorNotFound("Subscription") + respondErr(w, err) + return + } + + existingSub := res.Subscriptions[0] + + pushEnd := "" + rPolicy := "" + rPeriod := 0 + vhash := "" + verified := false + authzType := subscriptions.AutoGenerationAuthorizationHeader + authzHeaderValue := "" + maxMessages := int64(0) + pushWorker := auth.User{} + pwToken := gorillaContext.Get(r, "push_worker_token").(string) + + if postBody.PushCfg != (subscriptions.PushConfig{}) { + + pushEnabled := gorillaContext.Get(r, "push_enabled").(bool) + + // check the state of the push functionality + if !pushEnabled { + err := APIErrorPushConflict() + respondErr(w, err) + return + } + + pushWorker, err = auth.GetPushWorker(pwToken, refStr) + if err != nil { + err := APIErrInternalPush() + respondErr(w, err) + return + } + + pushEnd = postBody.PushCfg.Pend + // Check if push endpoint is not a valid https:// endpoint + if !(validation.IsValidHTTPS(pushEnd)) { + err := 
APIErrorInvalidData("Push endpoint should be addressed by a valid https url") + respondErr(w, err) + return + } + + rPolicy = postBody.PushCfg.RetPol.PolicyType + rPeriod = postBody.PushCfg.RetPol.Period + maxMessages = postBody.PushCfg.MaxMessages + + if rPolicy == "" { + rPolicy = subscriptions.LinearRetryPolicyType + } + if rPeriod <= 0 { + rPeriod = 3000 + } + + if !subscriptions.IsRetryPolicySupported(rPolicy) { + err := APIErrorInvalidData(subscriptions.UnSupportedRetryPolicyError) + respondErr(w, err) + return + } + + authzType = postBody.PushCfg.AuthorizationHeader.Type + // if there is a given authorization type check if its supported by the service + if authzType != "" { + if !subscriptions.IsAuthorizationHeaderTypeSupported(authzType) { + err := APIErrorInvalidData(subscriptions.UnSupportedAuthorizationHeader) + respondErr(w, err) + return + } + } + + // if the subscription was not push enabled before + // and no authorization_header has been specified + // use autogen + if authzType == "" && (existingSub.PushCfg == subscriptions.PushConfig{}) { + authzType = subscriptions.AutoGenerationAuthorizationHeader + } + + // if the provided authorization_header is of autogen type + // generate a new header + if authzType == subscriptions.AutoGenerationAuthorizationHeader { + authzHeaderValue, err = auth.GenToken() + if err != nil { + log.Errorf("Could not generate authorization header for subscription %v, %v", urlVars["subscription"], err.Error()) + err := APIErrGenericInternal("Could not generate authorization header") + respondErr(w, err) + return + } + } + + // if the provided authorization_header is of disabled type + if authzType == subscriptions.DisabledAuthorizationHeader { + authzHeaderValue = "" + } + + // if there is no authorization_type provided and the push cfg has an empty value because the sub + // was push activated before the implementation of the feature + // declare it disabled + if authzType == "" && 
existingSub.PushCfg.AuthorizationHeader.Type == "" { + authzType = subscriptions.DisabledAuthorizationHeader + } + + // if there is no authorization_header provided but the existing one is of disabled type + // preserve it + if authzType == "" && existingSub.PushCfg.AuthorizationHeader.Type == subscriptions.DisabledAuthorizationHeader { + authzType = subscriptions.DisabledAuthorizationHeader + } + + // if there is no authorization_header provided but the existing one is of autogen type + // preserve the value and type + if authzType == "" && existingSub.PushCfg.AuthorizationHeader.Type == subscriptions.AutoGenerationAuthorizationHeader { + authzType = subscriptions.AutoGenerationAuthorizationHeader + authzHeaderValue = existingSub.PushCfg.AuthorizationHeader.Value + } + } + + if maxMessages == 0 { + if existingSub.PushCfg.MaxMessages == 0 { + maxMessages = int64(1) + } else { + maxMessages = existingSub.PushCfg.MaxMessages + } + } + + // if the request wants to transform a pull subscription to a push one + // we need to begin the verification process + if postBody.PushCfg != (subscriptions.PushConfig{}) { + + // if the endpoint in not the same with the old one, we need to verify it again + if postBody.PushCfg.Pend != existingSub.PushCfg.Pend { + vhash, err = auth.GenToken() + if err != nil { + log.Errorf("Could not generate verification hash for subscription %v, %v", urlVars["subscription"], err.Error()) + err := APIErrGenericInternal("Could not generate verification hash") + respondErr(w, err) + return + } + // else keep the already existing data + } else { + vhash = existingSub.PushCfg.VerificationHash + verified = existingSub.PushCfg.Verified + } + } + + err = subscriptions.ModSubPush(projectUUID, subName, pushEnd, authzType, authzHeaderValue, maxMessages, rPolicy, rPeriod, vhash, verified, refStr) + + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("Subscription") + respondErr(w, err) + return + } + err := 
APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // if this is an deactivate request, try to retrieve the push worker in order to remove him from the sub's acl + if existingSub.PushCfg != (subscriptions.PushConfig{}) && postBody.PushCfg == (subscriptions.PushConfig{}) { + pushWorker, _ = auth.GetPushWorker(pwToken, refStr) + } + + // if the sub, was push enabled before the update and the endpoint was verified + // we need to deactivate it on the push server + if existingSub.PushCfg != (subscriptions.PushConfig{}) { + if existingSub.PushCfg.Verified { + // deactivate the subscription on the push backend + apsc := gorillaContext.Get(r, "apsc").(push.Client) + apsc.DeactivateSubscription(context.TODO(), existingSub.FullName).Result(false) + + // remove the push worker user from the sub's acl + err = auth.RemoveFromACL(projectUUID, "subscriptions", existingSub.Name, []string{pushWorker.Name}, refStr) + if err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + } + } + // if the update on push configuration is not intended to stop the push functionality + // activate the subscription with the new values + if postBody.PushCfg != (subscriptions.PushConfig{}) { + + // reactivate only if the push endpoint hasn't changed and it wes already verified + // otherwise we need to verify the ownership again before wee activate it + if postBody.PushCfg.Pend == existingSub.PushCfg.Pend && existingSub.PushCfg.Verified { + + // activate the subscription on the push backend + apsc := gorillaContext.Get(r, "apsc").(push.Client) + apsc.ActivateSubscription(context.TODO(), existingSub.FullName, existingSub.FullTopic, + pushEnd, rPolicy, uint32(rPeriod), maxMessages, authzHeaderValue).Result(false) + + // modify the sub's acl with the push worker's uuid + err = auth.AppendToACL(projectUUID, "subscriptions", existingSub.Name, []string{pushWorker.Name}, refStr) + if err != nil { + err := APIErrGenericInternal(err.Error()) + 
respondErr(w, err) + return + } + + // link the sub's project with the push worker + err = auth.AppendToUserProjects(pushWorker.UUID, projectUUID, refStr) + if err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + } + } + + // Write empty response if everything's ok + respondOK(w, output) +} + +// SubVerifyPushEndpoint (POST) verifies the ownership of a push endpoint registered in a push enabled subscription +func SubVerifyPushEndpoint(w http.ResponseWriter, r *http.Request) { + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + subName := urlVars["subscription"] + + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + pwToken := gorillaContext.Get(r, "push_worker_token").(string) + + pushEnabled := gorillaContext.Get(r, "push_enabled").(bool) + + pushW := auth.User{} + + // check the state of the push functionality + if !pushEnabled { + err := APIErrorPushConflict() + respondErr(w, err) + return + } + + pushW, err := auth.GetPushWorker(pwToken, refStr) + if err != nil { + err := APIErrInternalPush() + respondErr(w, err) + return + } + + // Get Result Object + res, err := subscriptions.Find(projectUUID, "", subName, "", 0, refStr) + + if err != nil { + err := APIErrGenericBackend() + respondErr(w, err) + return + } + + if res.Empty() { + err := APIErrorNotFound("Subscription") + respondErr(w, err) + return + } + + sub := res.Subscriptions[0] + + // check that the subscription is push enabled + if sub.PushCfg == (subscriptions.PushConfig{}) { + err := APIErrorGenericConflict("Subscription is not in push mode") + respondErr(w, err) + return + } + + // check that the endpoint isn't 
already verified + if sub.PushCfg.Verified { + err := APIErrorGenericConflict("Push endpoint is already verified") + respondErr(w, err) + return + } + + // verify the push endpoint + c := new(http.Client) + err = subscriptions.VerifyPushEndpoint(sub, c, refStr) + if err != nil { + err := APIErrPushVerification(err.Error()) + respondErr(w, err) + return + } + + // activate the subscription on the push backend + apsc := gorillaContext.Get(r, "apsc").(push.Client) + apsc.ActivateSubscription(context.TODO(), sub.FullName, sub.FullTopic, sub.PushCfg.Pend, + sub.PushCfg.RetPol.PolicyType, uint32(sub.PushCfg.RetPol.Period), + sub.PushCfg.MaxMessages, sub.PushCfg.AuthorizationHeader.Value).Result(false) + + // modify the sub's acl with the push worker's uuid + err = auth.AppendToACL(projectUUID, "subscriptions", sub.Name, []string{pushW.Name}, refStr) + if err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // link the sub's project with the push worker + err = auth.AppendToUserProjects(pushW.UUID, projectUUID, refStr) + if err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + respondOK(w, []byte{}) +} + +// SubModAck (POST) modifies the Ack deadline of the subscription +func SubModAck(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + // Get Result Object + urlSub := urlVars["subscription"] + + // Read POST JSON body + body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // Parse pull options + postBody, err := subscriptions.GetAckDeadlineFromJSON(body) + if err != nil { + err := APIErrorInvalidArgument("ackDeadlineSeconds(needs value 
between 0 and 600)") + respondErr(w, err) + log.Error(string(body[:])) + return + } + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + err = subscriptions.ModAck(projectUUID, urlSub, postBody.AckDeadline, refStr) + + if err != nil { + if err.Error() == "wrong value" { + respondErr(w, APIErrorInvalidArgument("ackDeadlineSeconds(needs value between 0 and 600)")) + return + } + if err.Error() == "not found" { + err := APIErrorNotFound("Subscription") + respondErr(w, err) + return + } + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + respondOK(w, output) +} + +// SubCreate (PUT) creates a new subscription +func SubCreate(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refBrk := gorillaContext.Get(r, "brk").(brokers.Broker) + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + // Read POST JSON body + body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // Parse pull options + postBody, err := subscriptions.GetFromJSON(body) + if err != nil { + err := APIErrorInvalidArgument("Subscription") + respondErr(w, err) + log.Error(string(body[:])) + return + } + + tProject, tName, err := subscriptions.ExtractFullTopicRef(postBody.FullTopic) + + if err != nil { + err := APIErrorInvalidName("Topic") + respondErr(w, err) + return + } + + if topics.HasTopic(projectUUID, tName, refStr) == false { + err := APIErrorNotFound("Topic") + 
respondErr(w, err) + return + } + + // Get current topic offset + tProjectUUID := projects.GetUUIDByName(tProject, refStr) + fullTopic := tProjectUUID + "." + tName + curOff := refBrk.GetMaxOffset(fullTopic) + + pushEnd := "" + authzType := "" + authzHeaderValue := "" + rPolicy := "" + rPeriod := 0 + maxMessages := int64(1) + + //pushWorker := auth.User{} + verifyHash := "" + + if postBody.PushCfg != (subscriptions.PushConfig{}) { + + // check the state of the push functionality + pwToken := gorillaContext.Get(r, "push_worker_token").(string) + pushEnabled := gorillaContext.Get(r, "push_enabled").(bool) + + if !pushEnabled { + err := APIErrorPushConflict() + respondErr(w, err) + return + } + + _, err = auth.GetPushWorker(pwToken, refStr) + if err != nil { + err := APIErrInternalPush() + respondErr(w, err) + return + } + + pushEnd = postBody.PushCfg.Pend + // Check if push endpoint is not a valid https:// endpoint + if !(validation.IsValidHTTPS(pushEnd)) { + err := APIErrorInvalidData("Push endpoint should be addressed by a valid https url") + respondErr(w, err) + return + } + rPolicy = postBody.PushCfg.RetPol.PolicyType + rPeriod = postBody.PushCfg.RetPol.Period + maxMessages = postBody.PushCfg.MaxMessages + + authzType = postBody.PushCfg.AuthorizationHeader.Type + if authzType == "" { + authzType = subscriptions.AutoGenerationAuthorizationHeader + } + + if !subscriptions.IsAuthorizationHeaderTypeSupported(authzType) { + err := APIErrorInvalidData(subscriptions.UnSupportedAuthorizationHeader) + respondErr(w, err) + return + } + + switch authzType { + case subscriptions.AutoGenerationAuthorizationHeader: + authzHeaderValue, err = auth.GenToken() + if err != nil { + log.Errorf("Could not generate authorization header for subscription %v, %v", urlVars["subscription"], err.Error()) + err := APIErrGenericInternal("Could not generate authorization header") + respondErr(w, err) + return + } + case subscriptions.DisabledAuthorizationHeader: + authzHeaderValue = "" + } + + 
if rPolicy == "" { + rPolicy = subscriptions.LinearRetryPolicyType + } + + if maxMessages == 0 { + maxMessages = int64(1) + } + + if rPeriod <= 0 { + rPeriod = 3000 + } + + if !subscriptions.IsRetryPolicySupported(rPolicy) { + err := APIErrorInvalidData(subscriptions.UnSupportedRetryPolicyError) + respondErr(w, err) + return + } + + verifyHash, err = auth.GenToken() + if err != nil { + log.Errorf("Could not generate verification hash for subscription %v, %v", urlVars["subscription"], err.Error()) + err := APIErrGenericInternal("Could not generate verification hash") + respondErr(w, err) + return + } + + } + + created := time.Now().UTC() + + // Get Result Object + res, err := subscriptions.CreateSub(projectUUID, urlVars["subscription"], tName, pushEnd, curOff, maxMessages, authzType, authzHeaderValue, postBody.Ack, rPolicy, rPeriod, verifyHash, false, created, refStr) + + if err != nil { + if err.Error() == "exists" { + err := APIErrorConflict("Subscription") + respondErr(w, err) + return + } + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) + +} + +// SubACL (GET) one sub's authorized users +func SubACL(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + urlSub := urlVars["subscription"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + res, err := auth.GetACL(projectUUID, "subscriptions", 
urlSub, refStr) + + // If not found + if err != nil { + err := APIErrorNotFound("Subscription") + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +//SubListAll (GET) all subscriptions +func SubListAll(w http.ResponseWriter, r *http.Request) { + + var err error + var strPageSize string + var pageSize int + var res subscriptions.PaginatedSubscriptions + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + roles := gorillaContext.Get(r, "auth_roles").([]string) + + urlValues := r.URL.Query() + pageToken := urlValues.Get("pageToken") + strPageSize = urlValues.Get("pageSize") + + // if this route is used by a user who only has a consumer role + // return all subscriptions that he has access to + userUUID := "" + if !auth.IsProjectAdmin(roles) && !auth.IsServiceAdmin(roles) && auth.IsConsumer(roles) { + userUUID = gorillaContext.Get(r, "auth_user_uuid").(string) + } + + if strPageSize != "" { + if pageSize, err = strconv.Atoi(strPageSize); err != nil { + log.Errorf("Pagesize %v produced an error while being converted to int: %v", strPageSize, err.Error()) + err := APIErrorInvalidData("Invalid page size") + respondErr(w, err) + return + } + } + + if res, err = subscriptions.Find(projectUUID, userUUID, "", pageToken, int32(pageSize), refStr); err != nil { + err := APIErrorInvalidData("Invalid page token") + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := 
APIErrExportJSON() + respondErr(w, err) + return + } + + // Write Response + output = []byte(resJSON) + respondOK(w, output) +} + +// SubPull (POST) consumes messages from the underlying topic +func SubPull(w http.ResponseWriter, r *http.Request) { + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Get url path variables + urlVars := mux.Vars(r) + urlProject := urlVars["project"] + urlSub := urlVars["subscription"] + + // Grab context references + refBrk := gorillaContext.Get(r, "brk").(brokers.Broker) + refStr := gorillaContext.Get(r, "str").(stores.Store) + refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) + refRoles := gorillaContext.Get(r, "auth_roles").([]string) + refAuthResource := gorillaContext.Get(r, "auth_resource").(bool) + pushEnabled := gorillaContext.Get(r, "push_enabled").(bool) + + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + // Get the subscription + results, err := subscriptions.Find(projectUUID, "", urlSub, "", 0, refStr) + if err != nil { + err := APIErrGenericBackend() + respondErr(w, err) + return + } + + if results.Empty() { + err := APIErrorNotFound("Subscription") + respondErr(w, err) + return + } + + targetSub := results.Subscriptions[0] + fullTopic := targetSub.ProjectUUID + "." 
+ targetSub.Topic + retImm := true + max := 1 + + // if the subscription is push enabled but push enabled is false, don't allow push worker user to consume + if targetSub.PushCfg != (subscriptions.PushConfig{}) && !pushEnabled && auth.IsPushWorker(refRoles) { + err := APIErrorPushConflict() + respondErr(w, err) + return + } + + // if the subscription is push enabled, allow only push worker and service_admin users to pull from it + if targetSub.PushCfg != (subscriptions.PushConfig{}) && !auth.IsPushWorker(refRoles) && !auth.IsServiceAdmin(refRoles) { + err := APIErrorForbidden() + respondErr(w, err) + return + } + + // Check Authorization per subscription + // - if enabled in config + // - if user has only consumer role + if refAuthResource && auth.IsConsumer(refRoles) { + if auth.PerResource(projectUUID, "subscriptions", targetSub.Name, refUserUUID, refStr) == false { + err := APIErrorForbidden() + respondErr(w, err) + return + } + } + + // check if the subscription's topic exists + if !topics.HasTopic(projectUUID, targetSub.Topic, refStr) { + err := APIErrorPullNoTopic() + respondErr(w, err) + return + } + + // Read POST JSON body + body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // Parse pull options + pullInfo, err := subscriptions.GetPullOptionsJSON(body) + if err != nil { + err := APIErrorInvalidArgument("Pull Parameters") + respondErr(w, err) + return + } + + if pullInfo.MaxMsg != "" { + max, err = strconv.Atoi(pullInfo.MaxMsg) + if err != nil { + max = 1 + } + } + + if pullInfo.RetImm == "false" { + retImm = false + } + + // Init Received Message List + recList := messages.RecList{} + + msgs, err := refBrk.Consume(r.Context(), fullTopic, targetSub.Offset, retImm, int64(max)) + if err != nil { + // If tracked offset is off + if err == brokers.ErrOffsetOff { + log.Debug("Will increment now...") + // Increment tracked offset to current min offset + targetSub.Offset = 
refBrk.GetMinOffset(fullTopic) + refStr.UpdateSubOffset(projectUUID, targetSub.Name, targetSub.Offset) + // Try again to consume + msgs, err = refBrk.Consume(r.Context(), fullTopic, targetSub.Offset, retImm, int64(max)) + // If still error respond and return + if err != nil { + log.Errorf("Couldn't consume messages for subscription %v, %v", targetSub.FullName, err.Error()) + err := APIErrGenericBackend() + respondErr(w, err) + return + } + } else { + log.Errorf("Couldn't consume messages for subscription %v, %v", targetSub.FullName, err.Error()) + err := APIErrGenericBackend() + respondErr(w, err) + return + } + } + var limit int + limit, err = strconv.Atoi(pullInfo.MaxMsg) + if err != nil { + limit = 0 + } + + ackPrefix := "projects/" + urlProject + "/subscriptions/" + urlSub + ":" + + for i, msg := range msgs { + if limit > 0 && i >= limit { + break // max messages left + } + curMsg, err := messages.LoadMsgJSON([]byte(msg)) + if err != nil { + err := APIErrGenericInternal("Message retrieved from broker network has invalid JSON Structure") + respondErr(w, err) + return + } + // calc the message id = message's kafka offset (read offst + msg position) + idOff := targetSub.Offset + int64(i) + curMsg.ID = strconv.FormatInt(idOff, 10) + curRec := messages.RecMsg{AckID: ackPrefix + curMsg.ID, Msg: curMsg} + recList.RecMsgs = append(recList.RecMsgs, curRec) + } + + // amount of messages consumed + msgCount := int64(len(msgs)) + + // consumption time + consumeTime := time.Now().UTC() + + // increment subscription number of message metric + refStr.IncrementSubMsgNum(projectUUID, urlSub, msgCount) + refStr.IncrementSubBytes(projectUUID, urlSub, recList.TotalSize()) + refStr.UpdateSubLatestConsume(projectUUID, targetSub.Name, consumeTime) + + // count the rate of consumed messages per sec between the last two consume events + var dt float64 = 1 + // if its the first consume to the subscription + // skip the subtraction that computes the DT between the last two consume events 
+ if !targetSub.LatestConsume.IsZero() { + dt = consumeTime.Sub(targetSub.LatestConsume).Seconds() + } + + refStr.UpdateSubConsumeRate(projectUUID, targetSub.Name, float64(msgCount)/dt) + + resJSON, err := recList.ExportJSON() + + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Stamp time to UTC Z to seconds + zSec := "2006-01-02T15:04:05Z" + t := time.Now().UTC() + ts := t.Format(zSec) + refStr.UpdateSubPull(targetSub.ProjectUUID, targetSub.Name, int64(len(recList.RecMsgs))+targetSub.Offset, ts) + + output = []byte(resJSON) + respondOK(w, output) +} diff --git a/handlers/subscriptions_test.go b/handlers/subscriptions_test.go new file mode 100644 index 00000000..9ec61288 --- /dev/null +++ b/handlers/subscriptions_test.go @@ -0,0 +1,2497 @@ +package handlers + +import ( + "bytes" + "fmt" + "github.com/ARGOeu/argo-messaging/brokers" + "github.com/ARGOeu/argo-messaging/config" + oldPush "github.com/ARGOeu/argo-messaging/push" + push "github.com/ARGOeu/argo-messaging/push/grpc/client" + "github.com/ARGOeu/argo-messaging/stores" + "github.com/ARGOeu/argo-messaging/subscriptions" + "github.com/gorilla/mux" + log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/suite" + "io/ioutil" + "net/http" + "net/http/httptest" + "strings" + "testing" + "time" +) + +type SubscriptionsHandlersTestSuite struct { + cfgStr string + suite.Suite +} + +func (suite *SubscriptionsHandlersTestSuite) SetupTest() { + suite.cfgStr = `{ + "bind_ip":"", + "port":8080, + "zookeeper_hosts":["localhost"], + "kafka_znode":"", + "store_host":"localhost", + "store_db":"argo_msg", + "certificate":"/etc/pki/tls/certs/localhost.crt", + "certificate_key":"/etc/pki/tls/private/localhost.key", + "per_resource_auth":"true", + "push_enabled": "true", + "push_worker_token": "push_token" + }` +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubModPushConfigError() { + + postJSON := `{ + "topic":"projects/ARGO/topics/topic1", + "pushConfig": { + "pushEndpoint": 
"http://www.example.com", + "retryPolicy": {} + } +}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1:modifyPushConfig", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 400, + "message": "Push endpoint should be addressed by a valid https url", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubModPushInvalidRetPol() { + + postJSON := `{ + "topic":"projects/ARGO/topics/topic1", + "pushConfig": { + "pushEndpoint": "https://www.example.com", + "retryPolicy": { + "type": "unknown" + } + } +}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1:modifyPushConfig", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 400, + "message": "Retry policy can only be of 'linear' or 'slowstart' type", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + 
suite.Equal(expResp, w.Body.String()) + +} + +// TestSubModPushConfigToActive tests the case where the user modifies the push configuration, +// in order to activate the subscription on the push server +// the push configuration was empty before the api call +func (suite *SubscriptionsHandlersTestSuite) TestSubModPushConfigToActive() { + + postJSON := `{ + "pushConfig": { + "pushEndpoint": "https://www.example.com", + "retryPolicy": {} + } +}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1:modifyPushConfig", strings.NewReader(postJSON)) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + sub, _ := str.QueryOneSub("argo_uuid", "sub1") + suite.Equal(200, w.Code) + suite.Equal("", w.Body.String()) + suite.Equal("https://www.example.com", sub.PushEndpoint) + suite.Equal(int64(1), sub.MaxMessages) + suite.Equal(3000, sub.RetPeriod) + suite.Equal("linear", sub.RetPolicy) + suite.False(sub.Verified) + suite.NotEqual("", sub.VerificationHash) + suite.Equal(subscriptions.AutoGenerationAuthorizationHeader, sub.AuthorizationType) + suite.NotEqual("", sub.AuthorizationHeader) +} + +// TestSubModPushConfigToInactive tests the use case where the user modifies the push configuration +// in order to deactivate the subscription on the push server +// the push configuration has values before the call and turns into an empty one by the end of the call +func (suite *SubscriptionsHandlersTestSuite) TestSubModPushConfigToInactive() { + + postJSON := `{ + "pushConfig": {} +}` + + req, 
err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:modifyPushConfig", strings.NewReader(postJSON)) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + sub, _ := str.QueryOneSub("argo_uuid", "sub4") + suite.Equal(200, w.Code) + suite.Equal("", w.Body.String()) + suite.Equal("", sub.PushEndpoint) + suite.Equal(0, sub.RetPeriod) + suite.Equal("", sub.RetPolicy) + suite.Equal("", sub.VerificationHash) + suite.False(sub.Verified) + // check to see that the push worker user has been removed from the subscription's acl + a1, _ := str.QueryACL("argo_uuid", "subscriptions", "sub4") + suite.Equal([]string{"uuid2", "uuid4"}, a1.ACL) +} + +// TestSubModPushConfigToInactivePushDisabled tests the use case where the user modifies the push configuration +// in order to deactivate the subscription on the push server +// the push configuration has values before the call and turns into an empty one by the end of the call +// push enabled is false, but turning a subscription from push to pull should always be available as an api action +func (suite *SubscriptionsHandlersTestSuite) TestSubModPushConfigToInactivePushDisabled() { + + postJSON := `{ + "pushConfig": {} +}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:modifyPushConfig", strings.NewReader(postJSON)) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = false + brk := brokers.MockBroker{} + str := 
stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + sub, _ := str.QueryOneSub("argo_uuid", "sub4") + suite.Equal(200, w.Code) + suite.Equal("", w.Body.String()) + suite.Equal("", sub.PushEndpoint) + suite.Equal(0, sub.RetPeriod) + suite.Equal("", sub.RetPolicy) +} + +// TestSubModPushConfigToInactiveMissingPushWorker tests the use case where the user modifies the push configuration +// in order to deactivate the subscription on the push server +// the push configuration has values before the call and turns into an empty one by the end of the call +// push enabled is true, we cannot retrieve the push worker user in order to remove him from the subscription's acl +// but turning a subscription from push to pull should always be available as an api action +func (suite *SubscriptionsHandlersTestSuite) TestSubModPushConfigToInactiveMissingPushWorker() { + + postJSON := `{ + "pushConfig": {} +}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:modifyPushConfig", strings.NewReader(postJSON)) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushWorkerToken = "missing" + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + sub, _ := str.QueryOneSub("argo_uuid", "sub4") + suite.Equal(200, w.Code) + suite.Equal("", 
w.Body.String()) + suite.Equal("", sub.PushEndpoint) + suite.Equal(0, sub.RetPeriod) + suite.Equal("", sub.RetPolicy) +} + +// TestSubModPushConfigToActive tests the case where the user modifies the push configuration, +// in order to activate the subscription on the push server +// the push configuration was empty before the api call +// since the push endpoint that has been registered is different from the previous verified one +// the sub will be deactivated on the push server and turn into unverified +func (suite *SubscriptionsHandlersTestSuite) TestSubModPushConfigUpdate() { + + postJSON := `{ + "pushConfig": { + "pushEndpoint": "https://www.example2.com", + "maxMessages": 5, + "authorization_header": { + "type": "autogen" + }, + "retryPolicy": { + "type":"linear", + "period": 5000 + } + } +}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:modifyPushConfig", strings.NewReader(postJSON)) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + subBeforeUpdate, _ := str.QueryOneSub("argo_uuid", "sub4") + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + sub, _ := str.QueryOneSub("argo_uuid", "sub4") + suite.Equal(200, w.Code) + suite.Equal("", w.Body.String()) + suite.Equal("https://www.example2.com", sub.PushEndpoint) + suite.Equal(int64(5), sub.MaxMessages) + suite.Equal(5000, sub.RetPeriod) + suite.Equal("linear", sub.RetPolicy) + suite.False(sub.Verified) + suite.NotEqual("", sub.VerificationHash) + suite.NotEqual("push-id-1", sub.VerificationHash) + suite.NotEqual(subBeforeUpdate.AuthorizationHeader, 
sub.AuthorizationHeader) +} + +// TestSubModPushConfigToActiveORUpdatePushDisabled tests the case where the user modifies the push configuration, +// in order to activate the subscription on the push server +// the push enabled config option is set to false +func (suite *SubscriptionsHandlersTestSuite) TestSubModPushConfigToActiveORUpdatePushDisabled() { + + postJSON := `{ + "pushConfig": { + "pushEndpoint": "https://www.example2.com", + "retryPolicy": { + "type":"linear", + "period": 5000 + } + } +}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:modifyPushConfig", strings.NewReader(postJSON)) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 409, + "message": "Push functionality is currently disabled", + "status": "CONFLICT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = false + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(409, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +// TestSubModPushConfigToActiveORUpdateMissingWorker tests the case where the user modifies the push configuration, +// in order to activate the subscription on the push server +// push enabled is true, but ams can't retrieve the push worker user +func (suite *SubscriptionsHandlersTestSuite) TestSubModPushConfigToActiveORUpdateMissingWorker() { + + postJSON := `{ + "pushConfig": { + "pushEndpoint": "https://www.example2.com", + "retryPolicy": { + "type":"linear", + "period": 5000 + } + } +}` + + req, err := http.NewRequest("POST", 
"http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:modifyPushConfig", strings.NewReader(postJSON)) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 500, + "message": "Push functionality is currently unavailable", + "status": "INTERNAL_SERVER_ERROR" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushWorkerToken = "missing" + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(500, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +// TestSubModPushConfigUpdateAuthzDisabled tests the case where the user modifies the push configuration, +// in order to activate the subscription on the push server +// the push configuration was empty before the api call +// since the push endpoint that has been registered is different from the previous verified one +// the sub will be deactivated on the push server and turn into unverified +func (suite *SubscriptionsHandlersTestSuite) TestSubModPushConfigUpdateAuthzDisabled() { + + postJSON := `{ + "pushConfig": { + "pushEndpoint": "https://www.example2.com", + "maxMessages": 5, + "authorization_header": { + "type": "disabled" + }, + "retryPolicy": { + "type":"linear", + "period": 5000 + } + } +}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:modifyPushConfig", strings.NewReader(postJSON)) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := 
oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + sub, _ := str.QueryOneSub("argo_uuid", "sub4") + suite.Equal(200, w.Code) + suite.Equal("", w.Body.String()) + suite.Equal("https://www.example2.com", sub.PushEndpoint) + suite.Equal(int64(5), sub.MaxMessages) + suite.Equal(5000, sub.RetPeriod) + suite.Equal("linear", sub.RetPolicy) + suite.False(sub.Verified) + suite.NotEqual("", sub.VerificationHash) + suite.NotEqual("push-id-1", sub.VerificationHash) + suite.Equal(subscriptions.DisabledAuthorizationHeader, sub.AuthorizationType) + suite.Equal("", sub.AuthorizationHeader) +} + +func (suite *SubscriptionsHandlersTestSuite) TestVerifyPushEndpoint() { + + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + w.Write([]byte("vhash-1")) + })) + + defer ts.Close() + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/push-sub-v1:verifyPushEndpoint", nil) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + + // add a temporary subscription + q1 := stores.QSub{ + Name: "push-sub-v1", + ProjectUUID: "argo_uuid", + PushEndpoint: ts.URL, + VerificationHash: "vhash-1", + Verified: false, + } + + str.SubList = append(str.SubList, q1) + str.SubsACL["push-sub-v1"] = stores.QAcl{} + + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:verifyPushEndpoint", WrapMockAuthConfig(SubVerifyPushEndpoint, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(200, 
w.Code) + suite.Equal("", w.Body.String()) + // check to see that the push worker user has been added to the subscription's acl + a1, _ := str.QueryACL("argo_uuid", "subscriptions", "push-sub-v1") + suite.Equal([]string{"uuid7"}, a1.ACL) +} + +func (suite *SubscriptionsHandlersTestSuite) TestVerifyPushEndpointHashMisMatch() { + + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + w.Write([]byte("unknown_hash")) + })) + + defer ts.Close() + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/push-sub-v1:verifyPushEndpoint", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 401, + "message": "Endpoint verification failed.Wrong verification hash", + "status": "UNAUTHORIZED" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + + // add a temporary subscription + q1 := stores.QSub{ + Name: "push-sub-v1", + ProjectUUID: "argo_uuid", + PushEndpoint: ts.URL, + VerificationHash: "vhash-1", + Verified: false, + } + + str.SubList = append(str.SubList, q1) + str.SubsACL["push-sub-v1"] = stores.QAcl{} + + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:verifyPushEndpoint", WrapMockAuthConfig(SubVerifyPushEndpoint, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(401, w.Code) + suite.Equal(expResp, w.Body.String()) + // check to see that the push worker user has NOT been added to the subscription's acl + a1, _ := str.QueryACL("argo_uuid", "subscriptions", "push-sub-v1") + suite.Equal(0, len(a1.ACL)) +} + +func (suite *SubscriptionsHandlersTestSuite) TestVerifyPushEndpointUnknownResponse() { + + ts := httptest.NewServer(http.HandlerFunc(func(w 
http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte("unknown_hash")) + })) + + defer ts.Close() + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/push-sub-v1:verifyPushEndpoint", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 401, + "message": "Endpoint verification failed.Wrong response status code", + "status": "UNAUTHORIZED" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + + // add a temporary subscription + q1 := stores.QSub{ + Name: "push-sub-v1", + ProjectUUID: "argo_uuid", + PushEndpoint: ts.URL, + VerificationHash: "vhash-1", + Verified: false, + } + + str.SubList = append(str.SubList, q1) + str.SubsACL["push-sub-v1"] = stores.QAcl{} + + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:verifyPushEndpoint", WrapMockAuthConfig(SubVerifyPushEndpoint, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(401, w.Code) + suite.Equal(expResp, w.Body.String()) + // check to see that the push worker user has NOT been added to the subscription's acl + a1, _ := str.QueryACL("argo_uuid", "subscriptions", "push-sub-v1") + suite.Equal(0, len(a1.ACL)) +} + +// TestVerifyPushEndpointPushServerError tests the case where the endpoint is verified, the push worker is moved to +// the sub's acl despite the push server being unavailable for now +func (suite *SubscriptionsHandlersTestSuite) TestVerifyPushEndpointPushServerError() { + + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + w.Write([]byte("vhash-1")) + })) + + defer ts.Close() + + req, err := http.NewRequest("POST", 
"http://localhost:8080/v1/projects/ARGO/subscriptions/errorSub:verifyPushEndpoint", nil) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + + // add a temporary subscription + q1 := stores.QSub{ + Name: "errorSub", + ProjectUUID: "argo_uuid", + PushEndpoint: ts.URL, + VerificationHash: "vhash-1", + Verified: false, + } + + str.SubList = append(str.SubList, q1) + str.SubsACL["errorSub"] = stores.QAcl{} + + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:verifyPushEndpoint", WrapMockAuthConfig(SubVerifyPushEndpoint, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal("", w.Body.String()) + // check to see that the push worker user has been added to the subscription's acl + a1, _ := str.QueryACL("argo_uuid", "subscriptions", "errorSub") + suite.Equal([]string{"uuid7"}, a1.ACL) +} + +func (suite *SubscriptionsHandlersTestSuite) TestVerifyPushEndpointAlreadyVerified() { + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/push-sub-v1:verifyPushEndpoint", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 409, + "message": "Push endpoint is already verified", + "status": "CONFLICT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + + // add a temporary subscription + q1 := stores.QSub{ + Name: "push-sub-v1", + ProjectUUID: "argo_uuid", + PushEndpoint: "https://example.com/receive_here", + VerificationHash: "vhash-1", + Verified: true, + } + + str.SubList = append(str.SubList, q1) + + router := mux.NewRouter().StrictSlash(true) + mgr := 
oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:verifyPushEndpoint", WrapMockAuthConfig(SubVerifyPushEndpoint, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(409, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestVerifyPushEndpointNotPushEnabled() { + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/push-sub-v1:verifyPushEndpoint", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 409, + "message": "Subscription is not in push mode", + "status": "CONFLICT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + + // add a temporary subscription + q1 := stores.QSub{ + Name: "push-sub-v1", + ProjectUUID: "argo_uuid", + } + + str.SubList = append(str.SubList, q1) + + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:verifyPushEndpoint", WrapMockAuthConfig(SubVerifyPushEndpoint, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(409, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubCreatePushConfig() { + + postJSON := `{ + "topic":"projects/ARGO/topics/topic1", + "pushConfig": { + "pushEndpoint": "https://www.example.com", + "retryPolicy": {} + } +}` + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/subNew", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "name": "/projects/ARGO/subscriptions/subNew", + "topic": "/projects/ARGO/topics/topic1", + "pushConfig": { + "pushEndpoint": 
"https://www.example.com", + "maxMessages": 1, + "authorization_header": { + "type": "autogen", + "value": "{{AUTHZV}}" + }, + "retryPolicy": { + "type": "linear", + "period": 3000 + }, + "verification_hash": "{{VHASH}}", + "verified": false + }, + "ackDeadlineSeconds": 10, + "created_on": "{{CON}}" +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + sub, _ := str.QueryOneSub("argo_uuid", "subNew") + expResp = strings.Replace(expResp, "{{VHASH}}", sub.VerificationHash, 1) + expResp = strings.Replace(expResp, "{{AUTHZV}}", sub.AuthorizationHeader, 1) + expResp = strings.Replace(expResp, "{{CON}}", sub.CreatedOn.Format("2006-01-02T15:04:05Z"), 1) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubCreatePushConfigSlowStart() { + + postJSON := `{ + "topic":"projects/ARGO/topics/topic1", + "pushConfig": { + "pushEndpoint": "https://www.example.com", + "authorization_header": { + "type": "disabled" + }, + "retryPolicy": { + "type": "slowstart" + } + } +}` + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/subNew", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "name": "/projects/ARGO/subscriptions/subNew", + "topic": "/projects/ARGO/topics/topic1", + "pushConfig": { + "pushEndpoint": "https://www.example.com", + "maxMessages": 1, + "authorization_header": { + "type": "disabled" + }, + "retryPolicy": { + "type": "slowstart" + }, + "verification_hash": "{{VHASH}}", + "verified": false + }, + "ackDeadlineSeconds": 
10, + "created_on": "{{CON}}" +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + sub, _ := str.QueryOneSub("argo_uuid", "subNew") + expResp = strings.Replace(expResp, "{{VHASH}}", sub.VerificationHash, 1) + expResp = strings.Replace(expResp, "{{CON}}", sub.CreatedOn.Format("2006-01-02T15:04:05Z"), 1) + suite.Equal(0, sub.RetPeriod) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubCreatePushConfigMissingPushWorker() { + + postJSON := `{ + "topic":"projects/ARGO/topics/topic1", + "pushConfig": { + "pushEndpoint": "https://www.example.com", + "retryPolicy": {} + } +}` + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/subNew", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 500, + "message": "Push functionality is currently unavailable", + "status": "INTERNAL_SERVER_ERROR" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushWorkerToken = "missing" + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + // subscription should not have been inserted to the store if it has push configuration + // but we can't retrieve the push 
worker + _, errSub := str.QueryOneSub("argo_uuid", "subNew") + suite.Equal(500, w.Code) + suite.Equal(expResp, w.Body.String()) + suite.Equal("empty", errSub.Error()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubCreatePushConfigPushDisabled() { + + postJSON := `{ + "topic":"projects/ARGO/topics/topic1", + "pushConfig": { + "pushEndpoint": "https://www.example.com", + "retryPolicy": {} + } +}` + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/subNew", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 409, + "message": "Push functionality is currently disabled", + "status": "CONFLICT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = false + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + // subscription should not have been inserted to the store if it has push configuration + // but push enables is false + _, errSub := str.QueryOneSub("argo_uuid", "subNew") + suite.Equal(409, w.Code) + suite.Equal(expResp, w.Body.String()) + suite.Equal("empty", errSub.Error()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubCreateInvalidRetPol() { + + postJSON := `{ + "topic":"projects/ARGO/topics/topic1", + "pushConfig": { + "pushEndpoint": "https://www.example.com", + "retryPolicy": { + "type": "unknown" + } + } +}` + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/subNew", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 400, + "message": "Retry policy can only be 
of 'linear' or 'slowstart' type", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubCreatePushConfigError() { + + postJSON := `{ + "topic":"projects/ARGO/topics/topic1", + "pushConfig": { + "pushEndpoint": "http://www.example.com", + "retryPolicy": {} + } +}` + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/subNew", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 400, + "message": "Push endpoint should be addressed by a valid https url", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubCreate() { + + postJSON := `{ + "topic":"projects/ARGO/topics/topic1" +}` + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/subNew", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "name": 
"/projects/ARGO/subscriptions/subNew", + "topic": "/projects/ARGO/topics/topic1", + "pushConfig": { + "pushEndpoint": "", + "maxMessages": 0, + "authorization_header": {}, + "retryPolicy": {}, + "verification_hash": "", + "verified": false + }, + "ackDeadlineSeconds": 10, + "created_on": "{{CON}}" +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + sub, _ := str.QueryOneSub("argo_uuid", "subNew") + fmt.Println(sub) + expResp = strings.Replace(expResp, "{{CON}}", sub.CreatedOn.Format("2006-01-02T15:04:05Z"), 1) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubCreateExists() { + + postJSON := `{ + "topic":"projects/ARGO/topics/topic1" +}` + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 409, + "message": "Subscription already exists", + "status": "ALREADY_EXISTS" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(409, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubCreateErrorTopic() { + + postJSON := `{ + 
"topic":"projects/ARGO/topics/topicFoo" +}` + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 404, + "message": "Topic doesn't exist", + "status": "NOT_FOUND" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + mgr := oldPush.Manager{} + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(404, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubDelete() { + + req, err := http.NewRequest("DELETE", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1", nil) + if err != nil { + log.Fatal(err) + } + + expResp := "" + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + mgr := oldPush.Manager{} + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubDelete, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubWithPushConfigDelete() { + + req, err := http.NewRequest("DELETE", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{"message":"Subscription /projects/ARGO/subscriptions/sub4 deactivated"}` + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := 
stores.NewMockStore("whatever", "argo_mgs") + mgr := oldPush.Manager{} + router := mux.NewRouter().StrictSlash(true) + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubDelete, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubWithPushConfigDeletePushServerError() { + + req, err := http.NewRequest("DELETE", "http://localhost:8080/v1/projects/ARGO/subscriptions/errorSub", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{"message":"Subscription /projects/ARGO/subscriptions/errorSub is not active"}` + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + str.SubList = append(str.SubList, stores.QSub{ + Name: "errorSub", + ProjectUUID: "argo_uuid", + PushEndpoint: "example.com", + // sub needs to be verified in order to perform the call to the push server + Verified: true, + }) + mgr := oldPush.Manager{} + router := mux.NewRouter().StrictSlash(true) + pc := new(push.MockClient) + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubDelete, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubGetOffsets() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub2:offsets", nil) + if err != nil { + log.Fatal(err) + } + expResp := `{ + "max": 2, + "min": 1, + "current": 1 +}` + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + // append a msg to the broker to cause the min topic from the offset to be at 1 while the sub's current is at 0 + 
brk.MsgList = append(brk.MsgList, "msg1") + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:offsets", WrapMockAuthConfig(SubGetOffsets, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubListOne() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "name": "/projects/ARGO/subscriptions/sub1", + "topic": "/projects/ARGO/topics/topic1", + "pushConfig": { + "pushEndpoint": "", + "maxMessages": 0, + "authorization_header": {}, + "retryPolicy": {}, + "verification_hash": "", + "verified": false + }, + "ackDeadlineSeconds": 10, + "created_on": "2020-11-19T00:00:00Z" +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubListOne, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubListAll() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "subscriptions": [ + { + "name": "/projects/ARGO/subscriptions/sub4", + "topic": "/projects/ARGO/topics/topic4", + "pushConfig": { + "pushEndpoint": "endpoint.foo", + "maxMessages": 1, + "authorization_header": { + "type": "autogen", + "value": "auth-header-1" + }, + 
"retryPolicy": { + "type": "linear", + "period": 300 + }, + "verification_hash": "push-id-1", + "verified": true + }, + "ackDeadlineSeconds": 10, + "created_on": "2020-11-22T00:00:00Z" + }, + { + "name": "/projects/ARGO/subscriptions/sub3", + "topic": "/projects/ARGO/topics/topic3", + "pushConfig": { + "pushEndpoint": "", + "maxMessages": 0, + "authorization_header": {}, + "retryPolicy": {}, + "verification_hash": "", + "verified": false + }, + "ackDeadlineSeconds": 10, + "created_on": "2020-11-21T00:00:00Z" + }, + { + "name": "/projects/ARGO/subscriptions/sub2", + "topic": "/projects/ARGO/topics/topic2", + "pushConfig": { + "pushEndpoint": "", + "maxMessages": 0, + "authorization_header": {}, + "retryPolicy": {}, + "verification_hash": "", + "verified": false + }, + "ackDeadlineSeconds": 10, + "created_on": "2020-11-20T00:00:00Z" + }, + { + "name": "/projects/ARGO/subscriptions/sub1", + "topic": "/projects/ARGO/topics/topic1", + "pushConfig": { + "pushEndpoint": "", + "maxMessages": 0, + "authorization_header": {}, + "retryPolicy": {}, + "verification_hash": "", + "verified": false + }, + "ackDeadlineSeconds": 10, + "created_on": "2020-11-19T00:00:00Z" + } + ], + "nextPageToken": "", + "totalSize": 4 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions", WrapMockAuthConfig(SubListAll, cfgKafka, &brk, str, &mgr, nil, "project_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubListAllFirstPage() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions?pageSize=2", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "subscriptions": [ + { + 
"name": "/projects/ARGO/subscriptions/sub4", + "topic": "/projects/ARGO/topics/topic4", + "pushConfig": { + "pushEndpoint": "endpoint.foo", + "maxMessages": 1, + "authorization_header": { + "type": "autogen", + "value": "auth-header-1" + }, + "retryPolicy": { + "type": "linear", + "period": 300 + }, + "verification_hash": "push-id-1", + "verified": true + }, + "ackDeadlineSeconds": 10, + "created_on": "2020-11-22T00:00:00Z" + }, + { + "name": "/projects/ARGO/subscriptions/sub3", + "topic": "/projects/ARGO/topics/topic3", + "pushConfig": { + "pushEndpoint": "", + "maxMessages": 0, + "authorization_header": {}, + "retryPolicy": {}, + "verification_hash": "", + "verified": false + }, + "ackDeadlineSeconds": 10, + "created_on": "2020-11-21T00:00:00Z" + } + ], + "nextPageToken": "MQ==", + "totalSize": 4 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions", WrapMockAuthConfig(SubListAll, cfgKafka, &brk, str, &mgr, nil, "project_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubListAllNextPage() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions?pageSize=2&pageToken=MQ==", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "subscriptions": [ + { + "name": "/projects/ARGO/subscriptions/sub2", + "topic": "/projects/ARGO/topics/topic2", + "pushConfig": { + "pushEndpoint": "", + "maxMessages": 0, + "authorization_header": {}, + "retryPolicy": {}, + "verification_hash": "", + "verified": false + }, + "ackDeadlineSeconds": 10, + "created_on": "2020-11-20T00:00:00Z" + }, + { + "name": "/projects/ARGO/subscriptions/sub1", + "topic": 
"/projects/ARGO/topics/topic1", + "pushConfig": { + "pushEndpoint": "", + "maxMessages": 0, + "authorization_header": {}, + "retryPolicy": {}, + "verification_hash": "", + "verified": false + }, + "ackDeadlineSeconds": 10, + "created_on": "2020-11-19T00:00:00Z" + } + ], + "nextPageToken": "", + "totalSize": 4 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions", WrapMockAuthConfig(SubListAll, cfgKafka, &brk, str, &mgr, nil, "project_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubListAllEmpty() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "subscriptions": [], + "nextPageToken": "", + "totalSize": 0 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + // empty the store + str.SubList = []stores.QSub{} + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions", WrapMockAuthConfig(SubListAll, cfgKafka, &brk, str, &mgr, nil, "project_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubListAllConsumer() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "subscriptions": [ + { + "name": "/projects/ARGO/subscriptions/sub4", + "topic": 
"/projects/ARGO/topics/topic4", + "pushConfig": { + "pushEndpoint": "endpoint.foo", + "maxMessages": 1, + "authorization_header": { + "type": "autogen", + "value": "auth-header-1" + }, + "retryPolicy": { + "type": "linear", + "period": 300 + }, + "verification_hash": "push-id-1", + "verified": true + }, + "ackDeadlineSeconds": 10, + "created_on": "2020-11-22T00:00:00Z" + }, + { + "name": "/projects/ARGO/subscriptions/sub3", + "topic": "/projects/ARGO/topics/topic3", + "pushConfig": { + "pushEndpoint": "", + "maxMessages": 0, + "authorization_header": {}, + "retryPolicy": {}, + "verification_hash": "", + "verified": false + }, + "ackDeadlineSeconds": 10, + "created_on": "2020-11-21T00:00:00Z" + }, + { + "name": "/projects/ARGO/subscriptions/sub2", + "topic": "/projects/ARGO/topics/topic2", + "pushConfig": { + "pushEndpoint": "", + "maxMessages": 0, + "authorization_header": {}, + "retryPolicy": {}, + "verification_hash": "", + "verified": false + }, + "ackDeadlineSeconds": 10, + "created_on": "2020-11-20T00:00:00Z" + } + ], + "nextPageToken": "", + "totalSize": 3 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions", WrapMockAuthConfig(SubListAll, cfgKafka, &brk, str, &mgr, nil, "consumer")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubListAllConsumerWithPagination() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions?pageSize=2", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "subscriptions": [ + { + "name": "/projects/ARGO/subscriptions/sub4", + "topic": "/projects/ARGO/topics/topic4", + "pushConfig": { + "pushEndpoint": "endpoint.foo", 
+ "maxMessages": 1, + "authorization_header": { + "type": "autogen", + "value": "auth-header-1" + }, + "retryPolicy": { + "type": "linear", + "period": 300 + }, + "verification_hash": "push-id-1", + "verified": true + }, + "ackDeadlineSeconds": 10, + "created_on": "2020-11-22T00:00:00Z" + }, + { + "name": "/projects/ARGO/subscriptions/sub3", + "topic": "/projects/ARGO/topics/topic3", + "pushConfig": { + "pushEndpoint": "", + "maxMessages": 0, + "authorization_header": {}, + "retryPolicy": {}, + "verification_hash": "", + "verified": false + }, + "ackDeadlineSeconds": 10, + "created_on": "2020-11-21T00:00:00Z" + } + ], + "nextPageToken": "MQ==", + "totalSize": 3 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/projects/{project}/subscriptions", WrapMockAuthConfig(SubListAll, cfgKafka, &brk, str, &mgr, nil, "consumer")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubListAllInvalidPageSize() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions?pageSize=invalid", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 400, + "message": "Invalid page size", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions", WrapMockAuthConfig(SubListAll, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expResp, 
w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubListAllInvalidPageToken() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions?pageToken=invalid", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 400, + "message": "Invalid page token", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions", WrapMockAuthConfig(SubListAll, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestTopicDelete() { + + req, err := http.NewRequest("DELETE", "http://localhost:8080/v1/projects/ARGO/topics/topic1", nil) + + if err != nil { + log.Fatal(err) + } + + expResp := "" + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + brk.Topics = map[string]string{} + brk.Topics["argo_uuid.topic1"] = "" + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}", WrapMockAuthConfig(TopicDelete, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + // make sure the topic got deleted + suite.Equal(0, len(brk.Topics)) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubTimeToOffset() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1?time=2019-06-10T9:38:30.500Z", nil) + + if err != nil { + log.Fatal(err) + } + + expResp := `{"offset":93204}` 
+ + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + brk.TopicTimeIndices = map[string][]brokers.TimeToOffset{} + + brk.TopicTimeIndices["argo_uuid.topic1"] = []brokers.TimeToOffset{ + {Timestamp: time.Date(2019, 6, 11, 0, 0, 0, 0, time.UTC), Offset: 93204}, + {Timestamp: time.Date(2019, 6, 12, 0, 0, 0, 0, time.UTC), Offset: 94000}, + } + + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubTimeToOffset, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubTimeToOffsetOutOfBounds() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1?time=2020-06-10T9:38:30.500Z", nil) + + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 409, + "message": "Timestamp is out of bounds for the subscription's topic/partition", + "status": "CONFLICT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + brk.TopicTimeIndices = map[string][]brokers.TimeToOffset{} + brk.TopicTimeIndices["argo_uuid.topic1"] = []brokers.TimeToOffset{ + {Timestamp: time.Date(2019, 6, 11, 0, 0, 0, 0, time.UTC), Offset: 93204}, + {Timestamp: time.Date(2019, 6, 12, 0, 0, 0, 0, time.UTC), Offset: 94000}, + } + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubTimeToOffset, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(409, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func 
(suite *SubscriptionsHandlersTestSuite) TestSubDeleteNotFound() { + + req, err := http.NewRequest("DELETE", "http://localhost:8080/v1/projects/ARGO/subscriptions/subFoo", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 404, + "message": "Subscription doesn't exist", + "status": "NOT_FOUND" + } +}` + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubDelete, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(404, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestModSubACLWrong() { + + postExp := `{"authorized_users":["UserX","UserFoo"]}` + + expRes := `{ + "error": { + "code": 404, + "message": "User(s): UserFoo do not exist", + "status": "NOT_FOUND" + } +}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub101:modAcl", bytes.NewBuffer([]byte(postExp))) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modAcl", WrapMockAuthConfig(SubModACL, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(404, w.Code) + suite.Equal(expRes, w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestModSubACL01() { + + postExp := `{"authorized_users":["UserX","UserZ"]}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscription/sub1:modAcl", 
bytes.NewBuffer([]byte(postExp))) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscription/{subscription}:modAcl", WrapMockAuthConfig(SubModACL, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal("", w.Body.String()) + + req2, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscription/sub1:acl", nil) + if err != nil { + log.Fatal(err) + } + router.HandleFunc("/v1/projects/{project}/subscription/{subscription}:acl", WrapMockAuthConfig(SubACL, cfgKafka, &brk, str, &mgr, nil)) + w2 := httptest.NewRecorder() + router.ServeHTTP(w2, req2) + suite.Equal(200, w2.Code) + + expResp := `{ + "authorized_users": [ + "UserX", + "UserZ" + ] +}` + + suite.Equal(expResp, w2.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubACL01() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscription/sub1:acl", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "authorized_users": [ + "UserA", + "UserB" + ] +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscription/{subscription}:acl", WrapMockAuthConfig(SubACL, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubACL02() { + + req, err := http.NewRequest("GET", 
"http://localhost:8080/v1/projects/ARGO/subscriptions/sub3:acl", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "authorized_users": [ + "UserZ", + "UserB", + "UserA" + ] +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:acl", WrapMockAuthConfig(SubACL, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubPullOne() { + + postJSON := `{ + "maxMessages":"1" +}` + url := "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1:pull" + req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "receivedMessages": [ + { + "ackId": "projects/ARGO/subscriptions/sub1:0", + "message": { + "messageId": "0", + "attributes": { + "foo": "bar" + }, + "data": "YmFzZTY0ZW5jb2RlZA==", + "publishTime": "2016-02-24T11:55:09.786127994Z" + } + } + ] +}` + tn := time.Now().UTC() + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + brk.Initialize([]string{"localhost"}) + brk.PopulateThree() // Add three messages to the broker queue + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:pull", WrapMockAuthConfig(SubPull, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expJSON, w.Body.String()) + spc, _, _, _ := str.QuerySubs("argo_uuid", "", "sub1", "", 0) + suite.True(tn.Before(spc[0].LatestConsume)) + 
suite.NotEqual(spc[0].ConsumeRate, 10) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubPullFromPushEnabledAsPushWorker() { + + postJSON := `{ + "maxMessages":"1" +}` + url := "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:pull" + req, err := http.NewRequest("POST", url, strings.NewReader(postJSON)) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "receivedMessages": [ + { + "ackId": "projects/ARGO/subscriptions/sub4:0", + "message": { + "messageId": "0", + "attributes": { + "foo": "bar" + }, + "data": "YmFzZTY0ZW5jb2RlZA==", + "publishTime": "2016-02-24T11:55:09.786127994Z" + } + } + ] +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + brk.Initialize([]string{"localhost"}) + brk.PopulateThree() // Add three messages to the broker queue + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:pull", WrapMockAuthConfig(SubPull, cfgKafka, &brk, str, &mgr, nil, "push_worker")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expJSON, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubPullFromPushEnabledAsPushWorkerDISABLED() { + + postJSON := `{ + "maxMessages":"1" +}` + url := "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:pull" + req, err := http.NewRequest("POST", url, strings.NewReader(postJSON)) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "error": { + "code": 409, + "message": "Push functionality is currently disabled", + "status": "CONFLICT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + // disable push functionality + cfgKafka.PushEnabled = false + brk := brokers.MockBroker{} + brk.Initialize([]string{"localhost"}) + brk.PopulateThree() // Add three messages to the broker queue + str := 
stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:pull", WrapMockAuthConfig(SubPull, cfgKafka, &brk, str, &mgr, nil, "push_worker")) + router.ServeHTTP(w, req) + suite.Equal(409, w.Code) + suite.Equal(expJSON, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubPullFromPushEnabledAsServiceAdmin() { + + postJSON := `{ + "maxMessages":"1" +}` + url := "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:pull" + req, err := http.NewRequest("POST", url, strings.NewReader(postJSON)) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "receivedMessages": [ + { + "ackId": "projects/ARGO/subscriptions/sub4:0", + "message": { + "messageId": "0", + "attributes": { + "foo": "bar" + }, + "data": "YmFzZTY0ZW5jb2RlZA==", + "publishTime": "2016-02-24T11:55:09.786127994Z" + } + } + ] +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + brk.Initialize([]string{"localhost"}) + brk.PopulateThree() // Add three messages to the broker queue + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:pull", WrapMockAuthConfig(SubPull, cfgKafka, &brk, str, &mgr, nil, "service_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expJSON, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubPullFromPushEnabledNoPushWorker() { + + postJSON := `{ + "maxMessages":"1" +}` + url := "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:pull" + req, err := http.NewRequest("POST", url, strings.NewReader(postJSON)) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "error": { + "code": 403, + "message": "Access to this 
resource is forbidden", + "status": "FORBIDDEN" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + brk.Initialize([]string{"localhost"}) + brk.PopulateThree() // Add three messages to the broker queue + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:pull", WrapMockAuthConfig(SubPull, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(403, w.Code) + suite.Equal(expJSON, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubModAck() { + + postJSON := `{ + "ackDeadlineSeconds":33 +}` + + postJSON2 := `{ + "ackDeadlineSeconds":700 +}` + + postJSON3 := `{ + "ackDeadlineSeconds":-22 +}` + + url := "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1:modifyAckDeadline" + req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON1 := `` + + expJSON2 := `{ + "error": { + "code": 400, + "message": "Invalid ackDeadlineSeconds(needs value between 0 and 600) Arguments", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + brk.Initialize([]string{"localhost"}) + brk.PopulateThree() // Add three messages to the broker queue + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyAckDeadline", WrapMockAuthConfig(SubModAck, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expJSON1, w.Body.String()) + + subRes, err := str.QueryOneSub("argo_uuid", "sub1") + suite.Equal(33, subRes.Ack) + + req2, err := 
http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON2))) + router2 := mux.NewRouter().StrictSlash(true) + w2 := httptest.NewRecorder() + mgr = oldPush.Manager{} + router2.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyAckDeadline", WrapMockAuthConfig(SubModAck, cfgKafka, &brk, str, &mgr, nil)) + router2.ServeHTTP(w2, req2) + suite.Equal(400, w2.Code) + suite.Equal(expJSON2, w2.Body.String()) + + req3, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON3))) + router3 := mux.NewRouter().StrictSlash(true) + w3 := httptest.NewRecorder() + mgr = oldPush.Manager{} + router3.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyAckDeadline", WrapMockAuthConfig(SubModAck, cfgKafka, &brk, str, &mgr, nil)) + router3.ServeHTTP(w3, req3) + suite.Equal(400, w3.Code) + suite.Equal(expJSON2, w3.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubAck() { + + postJSON := `{ + "ackIds":["projects/ARGO/subscriptions/sub2:1"] +}` + + postJSON2 := `{ +"ackIds":["projects/ARGO/subscriptions/sub1:2"] +}` + + postJSON3 := `{ +"ackIds":["projects/ARGO/subscriptions/sub1:2"] +}` + + url := "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1:acknowledge" + req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON1 := `{ + "error": { + "code": 400, + "message": "Invalid ack id", + "status": "INVALID_ARGUMENT" + } +}` + + expJSON2 := `{ + "error": { + "code": 408, + "message": "ack timeout", + "status": "TIMEOUT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + brk.Initialize([]string{"localhost"}) + brk.PopulateThree() // Add three messages to the broker queue + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + 
router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:acknowledge", WrapMockAuthConfig(SubAck, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expJSON1, w.Body.String()) + + // grab sub1 + zSec := "2006-01-02T15:04:05Z" + t := time.Now().UTC() + ts := t.Format(zSec) + str.SubList[0].PendingAck = ts + str.SubList[0].NextOffset = 3 + + req2, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON2))) + router2 := mux.NewRouter().StrictSlash(true) + w2 := httptest.NewRecorder() + mgr = oldPush.Manager{} + router2.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:acknowledge", WrapMockAuthConfig(SubAck, cfgKafka, &brk, str, &mgr, nil)) + router2.ServeHTTP(w2, req2) + suite.Equal(200, w2.Code) + suite.Equal("{}", w2.Body.String()) + + // mess with the timeout + t2 := time.Now().UTC().Add(-11 * time.Second) + ts2 := t2.Format(zSec) + str.SubList[0].PendingAck = ts2 + str.SubList[0].NextOffset = 4 + + req3, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON3))) + router3 := mux.NewRouter().StrictSlash(true) + w3 := httptest.NewRecorder() + mgr = oldPush.Manager{} + router3.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:acknowledge", WrapMockAuthConfig(SubAck, cfgKafka, &brk, str, &mgr, nil)) + router3.ServeHTTP(w3, req3) + suite.Equal(408, w3.Code) + suite.Equal(expJSON2, w3.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubError() { + + postJSON := `{ + +}` + url := "http://localhost:8080/v1/projects/ARGO/subscriptions/foo:pull" + req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "error": { + "code": 404, + "message": "Subscription doesn't exist", + "status": "NOT_FOUND" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + brk.Initialize([]string{"localhost"}) + 
brk.PopulateThree() // Add three messages to the broker queue + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:pull", WrapMockAuthConfig(SubPull, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(404, w.Code) + suite.Equal(expJSON, w.Body.String()) + +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubNoTopic() { + + postJSON := `{ + +}` + url := "http://localhost:8080/v1/projects/ARGO/subscriptions/no_topic_sub:pull" + req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "error": { + "code": 409, + "message": "Subscription's topic doesn't exist", + "status": "CONFLICT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + // add a mock sub that is linked to a non existent topic + str.SubList = append(str.SubList, stores.QSub{ + Name: "no_topic_sub", + ProjectUUID: "argo_uuid", + Topic: "unknown_topic"}, + ) + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:pull", WrapMockAuthConfig(SubPull, cfgKafka, &brk, str, &mgr, nil, "project_admin")) + router.ServeHTTP(w, req) + suite.Equal(409, w.Code) + suite.Equal(expJSON, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestSubPullAll() { + + postJSON := `{ + +}` + url := "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1:pull" + req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "receivedMessages": [ + { + "ackId": "projects/ARGO/subscriptions/sub1:0", + "message": { + "messageId": "0", + 
"attributes": { + "foo": "bar" + }, + "data": "YmFzZTY0ZW5jb2RlZA==", + "publishTime": "2016-02-24T11:55:09.786127994Z" + } + }, + { + "ackId": "projects/ARGO/subscriptions/sub1:1", + "message": { + "messageId": "1", + "attributes": { + "foo2": "bar2" + }, + "data": "YmFzZTY0ZW5jb2RlZA==", + "publishTime": "2016-02-24T11:55:09.827678754Z" + } + }, + { + "ackId": "projects/ARGO/subscriptions/sub1:2", + "message": { + "messageId": "2", + "attributes": { + "foo2": "bar2" + }, + "data": "YmFzZTY0ZW5jb2RlZA==", + "publishTime": "2016-02-24T11:55:09.830417467Z" + } + } + ] +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + brk.Initialize([]string{"localhost"}) + brk.PopulateThree() // Add three messages to the broker queue + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:pull", WrapMockAuthConfig(SubPull, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expJSON, w.Body.String()) +} + +func (suite *SubscriptionsHandlersTestSuite) TestValidationInSubs() { + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + brk.Initialize([]string{"localhost"}) + brk.PopulateThree() // Add three messages to the broker queue + str := stores.NewMockStore("whatever", "argo_mgs") + + okResp := `{ + "name": "/projects/ARGO/subscriptions/sub1", + "topic": "/projects/ARGO/topics/topic1", + "pushConfig": { + "pushEndpoint": "", + "maxMessages": 0, + "authorization_header": {}, + "retryPolicy": {}, + "verification_hash": "", + "verified": false + }, + "ackDeadlineSeconds": 10, + "created_on": "2020-11-19T00:00:00Z" +}` + + invProject := `{ + "error": { + "code": 400, + "message": "Invalid project name", + "status": "INVALID_ARGUMENT" + } +}` + + invSub := `{ + 
"error": { + "code": 400, + "message": "Invalid subscription name", + "status": "INVALID_ARGUMENT" + } +}` + + urls := []string{ + "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1", + "http://localhost:8080/v1/projects/AR:GO/subscriptions/sub1", + "http://localhost:8080/v1/projects/ARGO/subscriptions/s,ub1", + "http://localhost:8080/v1/projects/AR,GO/subscriptions/s:ub1", + } + + codes := []int(nil) + responses := []string(nil) + + for _, url := range urls { + w := httptest.NewRecorder() + req, err := http.NewRequest("GET", url, bytes.NewBuffer([]byte(""))) + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapValidate(WrapMockAuthConfig(SubListOne, cfgKafka, &brk, str, &mgr, nil))) + + if err != nil { + log.Fatal(err) + } + + router.ServeHTTP(w, req) + codes = append(codes, w.Code) + responses = append(responses, w.Body.String()) + + } + + // First request is valid so response is ok + suite.Equal(200, codes[0]) + suite.Equal(okResp, responses[0]) + + // Second request has invalid project name + suite.Equal(400, codes[1]) + suite.Equal(invProject, responses[1]) + + // Third request has invalid subscription name + suite.Equal(400, codes[2]) + suite.Equal(invSub, responses[2]) + + // Fourth request has invalid project and subscription name, but project is caught first + suite.Equal(400, codes[3]) + suite.Equal(invProject, responses[3]) + +} + +func TestSubscriptionsHandlersTestSuite(t *testing.T) { + log.SetOutput(ioutil.Discard) + suite.Run(t, new(SubscriptionsHandlersTestSuite)) +} diff --git a/handlers/topics.go b/handlers/topics.go new file mode 100644 index 00000000..5e6ddc71 --- /dev/null +++ b/handlers/topics.go @@ -0,0 +1,627 @@ +package handlers + +import ( + "encoding/json" + "fmt" + "github.com/ARGOeu/argo-messaging/auth" + "github.com/ARGOeu/argo-messaging/brokers" + "github.com/ARGOeu/argo-messaging/messages" + 
"github.com/ARGOeu/argo-messaging/schemas" + "github.com/ARGOeu/argo-messaging/stores" + "github.com/ARGOeu/argo-messaging/subscriptions" + "github.com/ARGOeu/argo-messaging/topics" + gorillaContext "github.com/gorilla/context" + "github.com/gorilla/mux" + log "github.com/sirupsen/logrus" + "io/ioutil" + "net/http" + "strconv" + "time" +) + +// TopicDelete (DEL) deletes an existing topic +func TopicDelete(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refBrk := gorillaContext.Get(r, "brk").(brokers.Broker) + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + // Get Result Object + + err := topics.RemoveTopic(projectUUID, urlVars["topic"], refStr) + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("Topic") + respondErr(w, err) + return + } + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + fullTopic := projectUUID + "." 
+ urlVars["topic"] + err = refBrk.DeleteTopic(fullTopic) + if err != nil { + log.Errorf("Couldn't delete topic %v from broker, %v", fullTopic, err.Error()) + } + + // Write empty response if anything ok + respondOK(w, output) +} + +// TopicModACL (PUT) modifies the ACL +func TopicModACL(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + // Get Result Object + urlTopic := urlVars["topic"] + + // Read POST JSON body + body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // Parse pull options + postBody, err := auth.GetACLFromJSON(body) + if err != nil { + err := APIErrorInvalidArgument("Topic ACL") + respondErr(w, err) + log.Error(string(body[:])) + return + } + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + // check if user list contain valid users for the given project + _, err = auth.AreValidUsers(projectUUID, postBody.AuthUsers, refStr) + if err != nil { + err := APIErrorRoot{Body: APIErrorBody{Code: http.StatusNotFound, Message: err.Error(), Status: "NOT_FOUND"}} + respondErr(w, err) + return + } + + err = auth.ModACL(projectUUID, "topics", urlTopic, postBody.AuthUsers, refStr) + + if err != nil { + + if err.Error() == "not found" { + err := APIErrorNotFound("Topic") + respondErr(w, err) + return + } + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + respondOK(w, output) + +} + +// TopicCreate (PUT) creates a new topic +func TopicCreate(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + 
// Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + postBody := map[string]string{} + schemaUUID := "" + + // check if there's a request body provided before trying to decode + if r.Body != nil { + + b, err := ioutil.ReadAll(r.Body) + + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + defer r.Body.Close() + + if len(b) > 0 { + err = json.Unmarshal(b, &postBody) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + schemaRef := postBody["schema"] + + // if there was a schema name provided, check its existence + if schemaRef != "" { + _, schemaName, err := schemas.ExtractSchema(schemaRef) + if err != nil { + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + sl, err := schemas.Find(projectUUID, "", schemaName, refStr) + if err != nil { + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + if sl.Empty() { + err := APIErrorNotFound("Schema") + respondErr(w, err) + return + } + + schemaUUID = sl.Schemas[0].UUID + } + } + } + + created := time.Now().UTC() + + // Get Result Object + res, err := topics.CreateTopic(projectUUID, urlVars["topic"], schemaUUID, created, refStr) + if err != nil { + if err.Error() == "exists" { + err := APIErrorConflict("Topic") + respondErr(w, err) + return + } + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + } + + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// TopicListOne (GET) 
one topic +func TopicListOne(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + results, err := topics.Find(projectUUID, "", urlVars["topic"], "", 0, refStr) + + if err != nil { + err := APIErrGenericBackend() + respondErr(w, err) + return + } + + // If not found + if results.Empty() { + err := APIErrorNotFound("Topic") + respondErr(w, err) + return + } + + res := results.Topics[0] + + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// ListSubsByTopic (GET) lists all subscriptions associated with the given topic +func ListSubsByTopic(w http.ResponseWriter, r *http.Request) { + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + results, err := topics.Find(projectUUID, "", urlVars["topic"], "", 0, refStr) + + if err != nil { + err := APIErrGenericBackend() + respondErr(w, err) + return + } + + // If not found + if results.Empty() { + err := APIErrorNotFound("Topic") + respondErr(w, err) + return + } + + subs, err := subscriptions.FindByTopic(projectUUID, results.Topics[0].Name, refStr) + if err != nil { + err := 
APIErrGenericBackend() + respondErr(w, err) + return + + } + + resJSON, err := json.Marshal(subs) + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + respondOK(w, resJSON) +} + +// TopicACL (GET) one topic's authorized users +func TopicACL(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + urlTopic := urlVars["topic"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + res, err := auth.GetACL(projectUUID, "topics", urlTopic, refStr) + + // If not found + if err != nil { + err := APIErrorNotFound("Topic") + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// TopicListAll (GET) all topics +func TopicListAll(w http.ResponseWriter, r *http.Request) { + + var err error + var strPageSize string + var pageSize int + var res topics.PaginatedTopics + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + roles := gorillaContext.Get(r, "auth_roles").([]string) + + urlValues := r.URL.Query() + pageToken := urlValues.Get("pageToken") + strPageSize = urlValues.Get("pageSize") + + // if this 
route is used by a user who only has a publisher role + // return all topics that he has access to + userUUID := "" + if !auth.IsProjectAdmin(roles) && !auth.IsServiceAdmin(roles) && auth.IsPublisher(roles) { + userUUID = gorillaContext.Get(r, "auth_user_uuid").(string) + } + + if strPageSize != "" { + if pageSize, err = strconv.Atoi(strPageSize); err != nil { + log.Errorf("Pagesize %v produced an error while being converted to int: %v", strPageSize, err.Error()) + err := APIErrorInvalidData("Invalid page size") + respondErr(w, err) + return + } + } + + if res, err = topics.Find(projectUUID, userUUID, "", pageToken, int32(pageSize), refStr); err != nil { + err := APIErrorInvalidData("Invalid page token") + respondErr(w, err) + return + } + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write Response + output = []byte(resJSON) + respondOK(w, output) +} + +// TopicPublish (POST) publish messages to a topic +func TopicPublish(w http.ResponseWriter, r *http.Request) { + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Get url path variables + urlVars := mux.Vars(r) + urlTopic := urlVars["topic"] + + // Grab context references + + refBrk := gorillaContext.Get(r, "brk").(brokers.Broker) + refStr := gorillaContext.Get(r, "str").(stores.Store) + refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) + refRoles := gorillaContext.Get(r, "auth_roles").([]string) + refAuthResource := gorillaContext.Get(r, "auth_resource").(bool) + // Get project UUID First to use as reference + projectUUID := gorillaContext.Get(r, "auth_project_uuid").(string) + + results, err := topics.Find(projectUUID, "", urlVars["topic"], "", 0, refStr) + + if err != nil { + err := APIErrGenericBackend() + 
respondErr(w, err) + return + } + + // If not found + if results.Empty() { + err := APIErrorNotFound("Topic") + respondErr(w, err) + return + } + + res := results.Topics[0] + + // Check Authorization per topic + // - if enabled in config + // - if user has only publisher role + + if refAuthResource && auth.IsPublisher(refRoles) { + + if auth.PerResource(projectUUID, "topics", urlTopic, refUserUUID, refStr) == false { + err := APIErrorForbidden() + respondErr(w, err) + return + } + } + + // Read POST JSON body + body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // Create Message List from Post JSON + msgList, err := messages.LoadMsgListJSON(body) + if err != nil { + err := APIErrorInvalidArgument("Message") + respondErr(w, err) + return + } + + // check if the topic has a schema associated with it + if res.Schema != "" { + + // retrieve the schema + _, schemaName, err := schemas.ExtractSchema(res.Schema) + if err != nil { + log.WithFields( + log.Fields{ + "type": "service_log", + "schema_name": res.Schema, + "topic_name": res.Name, + "error": err.Error(), + }, + ).Error("Could not extract schema name") + err := APIErrGenericInternal(schemas.GenericError) + respondErr(w, err) + return + } + + sl, err := schemas.Find(projectUUID, "", schemaName, refStr) + + if err != nil { + log.WithFields( + log.Fields{ + "type": "service_log", + "schema_name": schemaName, + "topic_name": res.Name, + "error": err.Error(), + }, + ).Error("Could not retrieve schema from the store") + err := APIErrGenericInternal(schemas.GenericError) + respondErr(w, err) + return + } + + if !sl.Empty() { + err := schemas.ValidateMessages(sl.Schemas[0], msgList) + if err != nil { + if err.Error() == "500" { + err := APIErrGenericInternal(schemas.GenericError) + respondErr(w, err) + return + } else { + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + } + } else { + log.WithFields( + log.Fields{ + 
"type": "service_log", + "schema_name": res.Schema, + "topic_name": res.Name, + }, + ).Error("List of schemas was empty") + err := APIErrGenericInternal(schemas.GenericError) + respondErr(w, err) + return + } + } + + // Init message ids list + msgIDs := messages.MsgIDs{IDs: []string{}} + + // For each message in message list + for _, msg := range msgList.Msgs { + // Get offset and set it as msg + fullTopic := projectUUID + "." + urlTopic + + msgID, rTop, _, _, err := refBrk.Publish(fullTopic, msg) + + if err != nil { + if err.Error() == "kafka server: Message was too large, server rejected it to avoid allocation error." { + err := APIErrTooLargeMessage("Message size too large") + respondErr(w, err) + return + } + + err := APIErrGenericBackend() + respondErr(w, err) + return + } + + msg.ID = msgID + // Assertions for Succesfull Publish + if rTop != fullTopic { + err := APIErrGenericInternal("Broker reports wrong topic") + respondErr(w, err) + return + } + + // Append the MsgID of the successful published message to the msgIds list + msgIDs.IDs = append(msgIDs.IDs, msg.ID) + } + + // timestamp of the publish event + publishTime := time.Now().UTC() + + // amount of messages published + msgCount := int64(len(msgList.Msgs)) + + // increment topic number of message metric + refStr.IncrementTopicMsgNum(projectUUID, urlTopic, msgCount) + + // increment daily count of topic messages + year, month, day := publishTime.Date() + refStr.IncrementDailyTopicMsgCount(projectUUID, urlTopic, msgCount, time.Date(year, month, day, 0, 0, 0, 0, time.UTC)) + + // increment topic total bytes published + refStr.IncrementTopicBytes(projectUUID, urlTopic, msgList.TotalSize()) + + // update latest publish date for the given topic + refStr.UpdateTopicLatestPublish(projectUUID, urlTopic, publishTime) + + // count the rate of published messages per sec between the last two publish events + var dt float64 = 1 + // if its the first publish to the topic + // skip the subtraction that computes the DT 
between the last two publish events + if !res.LatestPublish.IsZero() { + dt = publishTime.Sub(res.LatestPublish).Seconds() + } + refStr.UpdateTopicPublishRate(projectUUID, urlTopic, float64(msgCount)/dt) + + // Export the msgIDs + resJSON, err := msgIDs.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} diff --git a/handlers/topics_test.go b/handlers/topics_test.go new file mode 100644 index 00000000..e6c8f8db --- /dev/null +++ b/handlers/topics_test.go @@ -0,0 +1,1071 @@ +package handlers + +import ( + "bytes" + "fmt" + "github.com/ARGOeu/argo-messaging/brokers" + "github.com/ARGOeu/argo-messaging/config" + oldPush "github.com/ARGOeu/argo-messaging/push" + push "github.com/ARGOeu/argo-messaging/push/grpc/client" + "github.com/ARGOeu/argo-messaging/stores" + "github.com/gorilla/mux" + "github.com/stretchr/testify/suite" + "io/ioutil" + "log" + "net/http" + "net/http/httptest" + "strings" + "testing" + "time" +) + +type TopicsHandlersTestSuite struct { + suite.Suite + cfgStr string +} + +func (suite *TopicsHandlersTestSuite) SetupTest() { + suite.cfgStr = `{ + "bind_ip":"", + "port":8080, + "zookeeper_hosts":["localhost"], + "kafka_znode":"", + "store_host":"localhost", + "store_db":"argo_msg", + "certificate":"/etc/pki/tls/certs/localhost.crt", + "certificate_key":"/etc/pki/tls/private/localhost.key", + "per_resource_auth":"true", + "push_enabled": "true", + "push_worker_token": "push_token" + }` +} + +func (suite *TopicsHandlersTestSuite) TestTopicDeleteNotfound() { + + req, err := http.NewRequest("DELETE", "http://localhost:8080/v1/projects/ARGO/topics/topicFoo", nil) + + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 404, + "message": "Topic doesn't exist", + "status": "NOT_FOUND" + } +}` + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := 
stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}", WrapMockAuthConfig(TopicDelete, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(404, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *TopicsHandlersTestSuite) TestTopicCreate() { + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/topics/topicNew", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "name": "/projects/ARGO/topics/topicNew", + "created_on": "{{CON}}" +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}", WrapMockAuthConfig(TopicCreate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + tp, _, _, _ := str.QueryTopics("argo_uuid", "", "topicNew", "", 1) + expResp = strings.Replace(expResp, "{{CON}}", tp[0].CreatedOn.Format("2006-01-02T15:04:05Z"), 1) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *TopicsHandlersTestSuite) TestTopicCreateExists() { + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/topics/topic1", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 409, + "message": "Topic already exists", + "status": "ALREADY_EXISTS" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}", WrapMockAuthConfig(TopicCreate, 
cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(409, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *TopicsHandlersTestSuite) TestTopicListOne() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics/topic1", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "name": "/projects/ARGO/topics/topic1", + "created_on": "2020-11-22T00:00:00Z" +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}", WrapMockAuthConfig(TopicListOne, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *TopicsHandlersTestSuite) TestTopicListSubscriptions() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics/topic1/subscriptions", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{"subscriptions":["/projects/ARGO/subscriptions/sub1"]}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}/subscriptions", WrapMockAuthConfig(ListSubsByTopic, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *TopicsHandlersTestSuite) TestTopicListSubscriptionsEmpty() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics/topic1/subscriptions", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{"subscriptions":[]}` + + cfgKafka := 
config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + str.SubList = nil + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}/subscriptions", WrapMockAuthConfig(ListSubsByTopic, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *TopicsHandlersTestSuite) TestModTopicACL01() { + + postExp := `{"authorized_users":["UserX","UserZ"]}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/topics/topic1:modAcl", bytes.NewBuffer([]byte(postExp))) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}:modAcl", WrapMockAuthConfig(TopicModACL, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal("", w.Body.String()) + + req2, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics/topic1:acl", nil) + if err != nil { + log.Fatal(err) + } + router.HandleFunc("/v1/projects/{project}/topics/{topic}:acl", WrapMockAuthConfig(TopicACL, cfgKafka, &brk, str, &mgr, nil)) + w2 := httptest.NewRecorder() + router.ServeHTTP(w2, req2) + suite.Equal(200, w2.Code) + + expResp := `{ + "authorized_users": [ + "UserX", + "UserZ" + ] +}` + + suite.Equal(expResp, w2.Body.String()) + +} + +func (suite *TopicsHandlersTestSuite) TestTopicACL01() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics/topic1:acl", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "authorized_users": 
[ + "UserA", + "UserB" + ] +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}:acl", WrapMockAuthConfig(TopicACL, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *TopicsHandlersTestSuite) TestTopicACL02() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics/topic3:acl", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "authorized_users": [ + "UserX" + ] +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}:acl", WrapMockAuthConfig(TopicACL, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *TopicsHandlersTestSuite) TestModTopicACLWrong() { + + postExp := `{"authorized_users":["UserX","UserFoo"]}` + + expRes := `{ + "error": { + "code": 404, + "message": "User(s): UserFoo do not exist", + "status": "NOT_FOUND" + } +}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/topics/topic1:modAcl", bytes.NewBuffer([]byte(postExp))) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + 
router.HandleFunc("/v1/projects/{project}/topics/{topic}:modAcl", WrapMockAuthConfig(TopicModACL, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(404, w.Code) + suite.Equal(expRes, w.Body.String()) + +} + +func (suite *TopicsHandlersTestSuite) TestTopicListAll() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "topics": [ + { + "name": "/projects/ARGO/topics/topic4", + "created_on": "2020-11-19T00:00:00Z" + }, + { + "name": "/projects/ARGO/topics/topic3", + "schema": "projects/ARGO/schemas/schema-3", + "created_on": "2020-11-20T00:00:00Z" + }, + { + "name": "/projects/ARGO/topics/topic2", + "schema": "projects/ARGO/schemas/schema-1", + "created_on": "2020-11-21T00:00:00Z" + }, + { + "name": "/projects/ARGO/topics/topic1", + "created_on": "2020-11-22T00:00:00Z" + } + ], + "nextPageToken": "", + "totalSize": 4 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics", WrapMockAuthConfig(TopicListAll, cfgKafka, &brk, str, &mgr, nil, "project_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *TopicsHandlersTestSuite) TestTopicListAllPublisher() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "topics": [ + { + "name": "/projects/ARGO/topics/topic2", + "schema": "projects/ARGO/schemas/schema-1", + "created_on": "2020-11-21T00:00:00Z" + }, + { + "name": "/projects/ARGO/topics/topic1", + "created_on": "2020-11-22T00:00:00Z" + } + ], + "nextPageToken": "", + "totalSize": 2 +}` + + cfgKafka := config.NewAPICfg() + 
cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics", WrapMockAuthConfig(TopicListAll, cfgKafka, &brk, str, &mgr, nil, "publisher")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *TopicsHandlersTestSuite) TestTopicListAllPublisherWithPagination() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics?pageSize=1", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "topics": [ + { + "name": "/projects/ARGO/topics/topic2", + "schema": "projects/ARGO/schemas/schema-1", + "created_on": "2020-11-21T00:00:00Z" + } + ], + "nextPageToken": "MA==", + "totalSize": 2 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics", WrapMockAuthConfig(TopicListAll, cfgKafka, &brk, str, &mgr, nil, "publisher")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *TopicsHandlersTestSuite) TestPublishWithSchema() { + + type td struct { + topic string + postBody string + expectedResponse string + expectedStatusCode int + msg string + } + + testData := []td{ + { + topic: "topic2", + postBody: `{ + "messages" : [ + + { + "attributes": {}, + "data": "eyJuYW1lIjoibmFtZS0xIiwgImVtYWlsIjogInRlc3RAZXhhbXBsZS5jb20ifQ==" + }, + + { + "attributes": {}, + "data": "eyJuYW1lIjoibmFtZS0xIiwgImVtYWlsIjogInRlc3RAZXhhbXBsZS5jb20iLCAiYWRkcmVzcyI6IlN0cmVldCAxMyIsInRlbGVwaG9uZSI6IjY5NDg1Njc4ODkifQ==" + } + ] +}`, + expectedStatusCode: 200, + expectedResponse: `{ + 
"messageIds": [ + "1", + "2" + ] +}`, + msg: "Case where the messages are validated successfully(JSON)", + }, + { + topic: "topic3", + postBody: `{ + "messages" : [ + + { + "attributes": {}, + "data": "DGFnZWxvc8T8Cg==" + }, + { + "attributes": {}, + "data": "DGFnZWxvc8T8Cg==" + } + ] +}`, + expectedStatusCode: 200, + expectedResponse: `{ + "messageIds": [ + "3", + "4" + ] +}`, + msg: "Case where the messages are validated successfully(AVRO)", + }, + { + topic: "topic2", + postBody: `{ + "messages" : [ + + { + "attributes": {}, + "data": "eyJuYW1lIjoibmFtZS0xIiwiYWRkcmVzcyI6IlN0cmVldCAxMyIsInRlbGVwaG9uZSI6Njk0ODU2Nzg4OX0=" + }, + + { + "attributes": {}, + "data": "eyJuYW1lIjoibmFtZS0xIiwgImVtYWlsIjogInRlc3RAZXhhbXBsZS5jb20iLCAiYWRkcmVzcyI6IlN0cmVldCAxMyIsInRlbGVwaG9uZSI6IjY5NDg1Njc4ODkifQ==" + } + ] +}`, + expectedStatusCode: 400, + expectedResponse: `{ + "error": { + "code": 400, + "message": "Message 0 data is not valid.1)(root): email is required.2)telephone: Invalid type. Expected: string, given: integer.", + "status": "INVALID_ARGUMENT" + } +}`, + msg: "Case where one of the messages is not successfully validated(2 errors)", + }, + { + topic: "topic3", + postBody: `{ + "messages" : [ + + { + "attributes": {}, + "data": "T2JqAQQWYXZyby5zY2hlbWGYAnsidHlwZSI6InJlY29yZCIsIm5hbWUiOiJQbGFjZSIsIm5hbWVzcGFjZSI6InBsYWNlLmF2cm8iLCJmaWVsZHMiOlt7Im5hbWUiOiJwbGFjZW5hbWUiLCJ0eXBlIjoic3RyaW5nIn0seyJuYW1lIjoiYWRkcmVzcyIsInR5cGUiOiJzdHJpbmcifV19FGF2cm8uY29kZWMIbnVsbABM1P4b0GpYaCg9tqxa+YDZAiQSc3RyZWV0IDIyDnBsYWNlIGFM1P4b0GpYaCg9tqxa+YDZ" + }, + + { + "attributes": {}, + "data": "DGFnZWxvc8T8Cg==" + } + ] +}`, + expectedStatusCode: 400, + expectedResponse: `{ + "error": { + "code": 400, + "message": "Message 0 is not valid.cannot decode binary record \"user.avro.User\" field \"username\": cannot decode binary string: cannot decode binary bytes: negative size: -40", + "status": "INVALID_ARGUMENT" + } +}`, + msg: "Case where one of the messages is not successfully validated(1 
error)(AVRO)", + }, + + { + topic: "topic2", + postBody: `{ + "messages" : [ + + { + "attributes": {}, + "data": "eyJuYW1lIjoibmFtZS0xIiwiYWRkcmVzcyI6IlN0cmVldCAxMyIsInRlbGVwaG9uZSI6IjY5NDg1Njc4ODkifQo=" + }, + + { + "attributes": {}, + "data": "eyJuYW1lIjoibmFtZS0xIiwgImVtYWlsIjogInRlc3RAZXhhbXBsZS5jb20iLCAiYWRkcmVzcyI6IlN0cmVldCAxMyIsInRlbGVwaG9uZSI6IjY5NDg1Njc4ODkifQ==" + } + ] +}`, + expectedStatusCode: 400, + expectedResponse: `{ + "error": { + "code": 400, + "message": "Message 0 data is not valid,(root): email is required", + "status": "INVALID_ARGUMENT" + } +}`, + msg: "Case where the one of the messages is not successfully validated(1 error)", + }, + { + topic: "topic2", + postBody: `{ + "messages" : [ + + { + "attributes": {}, + "data": "eyJuYW1lIjoibmFtZS0xIiwgImVtYWlsIjogInRlc3RAZXhhbXBsZS5jb20iLCAiYWRkcmVzcyI6IlN0cmVldCAxMyIsInRlbGVwaG9uZSI6IjY5NDg1Njc4ODkifQ==" + }, + + { + "attributes": {}, + "data": "eyJuYW1lIjoibmFtZS0xIiwiYWRkcmVzcyI6IlN0cmVldCAxMyIsInRlbGVwaG9uZSI6IjY5NDg1Njc4ODkiCg==" + } + ] +}`, + expectedStatusCode: 400, + expectedResponse: `{ + "error": { + "code": 400, + "message": "Message 1 data is not valid JSON format,unexpected EOF", + "status": "INVALID_ARGUMENT" + } +}`, + msg: "Case where the one of the messages is not in valid json format", + }, + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + cfgKafka.PushEnabled = true + cfgKafka.PushWorkerToken = "push_token" + cfgKafka.ResAuth = false + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + pc := new(push.MockClient) + + for _, t := range testData { + + w := httptest.NewRecorder() + url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/topics/%v", t.topic) + req, err := http.NewRequest("POST", url, strings.NewReader(t.postBody)) + if err != nil { + log.Fatal(err) + } + router.HandleFunc("/v1/projects/{project}/topics/{topic}", 
WrapMockAuthConfig(TopicPublish, cfgKafka, &brk, str, &mgr, pc)) + router.ServeHTTP(w, req) + + suite.Equal(t.expectedStatusCode, w.Code, t.msg) + suite.Equal(t.expectedResponse, w.Body.String(), t.msg) + } +} + +func (suite *TopicsHandlersTestSuite) TestTopicListAllFirstPage() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics?pageSize=2", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "topics": [ + { + "name": "/projects/ARGO/topics/topic4", + "created_on": "2020-11-19T00:00:00Z" + }, + { + "name": "/projects/ARGO/topics/topic3", + "schema": "projects/ARGO/schemas/schema-3", + "created_on": "2020-11-20T00:00:00Z" + } + ], + "nextPageToken": "MQ==", + "totalSize": 4 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics", WrapMockAuthConfig(TopicListAll, cfgKafka, &brk, str, &mgr, nil, "project_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *TopicsHandlersTestSuite) TestTopicListAllNextPage() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics?pageSize=2&pageToken=MA==", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "topics": [ + { + "name": "/projects/ARGO/topics/topic1", + "created_on": "2020-11-22T00:00:00Z" + } + ], + "nextPageToken": "", + "totalSize": 4 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics", WrapMockAuthConfig(TopicListAll, cfgKafka, &brk, str, &mgr, 
nil, "project_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *TopicsHandlersTestSuite) TestTopicListAllEmpty() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics?pageSize=2", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "topics": [], + "nextPageToken": "", + "totalSize": 0 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + // empty the store + str.TopicList = []stores.QTopic{} + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics", WrapMockAuthConfig(TopicListAll, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *TopicsHandlersTestSuite) TestTopicListAllInvalidPageSize() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics?pageSize=invalid", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 400, + "message": "Invalid page size", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics", WrapMockAuthConfig(TopicListAll, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *TopicsHandlersTestSuite) TestTopicListAllInvalidPageToken() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics?pageToken=invalid", nil) + if err != nil { + log.Fatal(err) + } + + 
expResp := `{ + "error": { + "code": 400, + "message": "Invalid page token", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics", WrapMockAuthConfig(TopicListAll, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *TopicsHandlersTestSuite) TestPublish() { + + postJSON := `{ + "messages": [ + { + "attributes": + { + "foo":"bar" + } + , + "data": "YmFzZTY0ZW5jb2RlZA==" + } + ] +}` + url := "http://localhost:8080/v1/projects/ARGO/topics/topic1:publish" + req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "messageIds": [ + "1" + ] +}` + tn := time.Now().UTC() + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}:publish", WrapMockAuthConfig(TopicPublish, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expJSON, w.Body.String()) + tpc, _, _, _ := str.QueryTopics("argo_uuid", "", "topic1", "", 0) + suite.True(tn.Before(tpc[0].LatestPublish)) + suite.NotEqual(tpc[0].PublishRate, 10) + +} + +func (suite *TopicsHandlersTestSuite) TestPublishMultiple() { + + postJSON := `{ + "messages": [ + { + "attributes": + { + "foo":"bar" + } + , + "data": "YmFzZTY0ZW5jb2RlZA==" + }, + { + "attributes": + { + "foo2":"bar2" + } + , + "data": "YmFzZTY0ZW5jb2RlZA==" + }, + { + "attributes": + { + "foo2":"bar2" + 
} + , + "data": "YmFzZTY0ZW5jb2RlZA==" + } + ] +}` + url := "http://localhost:8080/v1/projects/ARGO/topics/topic1:publish" + req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "messageIds": [ + "1", + "2", + "3" + ] +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + brk.Initialize([]string{"localhost"}) + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}:publish", WrapMockAuthConfig(TopicPublish, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expJSON, w.Body.String()) + +} + +func (suite *TopicsHandlersTestSuite) TestPublishError() { + + postJSON := `{ + "messages": [ + { + "attributes": [ + { + "key": "foo", + "valu2RlZA==" + }, + { + "attributes": [ + { + "key": "foo2", + "value": "bar2" + } + ], + "data": "YmFzZTY0ZW5jb2RlZA==" + } + ] +}` + url := "http://localhost:8080/v1/projects/ARGO/topics/topic1:publish" + req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "error": { + "code": 400, + "message": "Invalid Message Arguments", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}:publish", WrapMockAuthConfig(TopicPublish, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expJSON, w.Body.String()) + +} + +func (suite *TopicsHandlersTestSuite) TestPublishNoTopic() { + + postJSON := 
`{ + "messages": [ + { + "attributes": [ + { + "key": "foo", + "value": "bar" + } + ], + "data": "YmFzZTY0ZW5jb2RlZA==" + }, + { + "attributes": [ + { + "key": "foo2", + "value": "bar2" + } + ], + "data": "YmFzZTY0ZW5jb2RlZA==" + } + ] +}` + url := "http://localhost:8080/v1/projects/ARGO/topics/FOO:publish" + req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "error": { + "code": 404, + "message": "Topic doesn't exist", + "status": "NOT_FOUND" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}:publish", WrapMockAuthConfig(TopicPublish, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(404, w.Code) + suite.Equal(expJSON, w.Body.String()) +} + +func (suite *TopicsHandlersTestSuite) TestValidationInTopics() { + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + brk.Initialize([]string{"localhost"}) + brk.PopulateThree() // Add three messages to the broker queue + str := stores.NewMockStore("whatever", "argo_mgs") + + okResp := `{ + "name": "/projects/ARGO/topics/topic1", + "created_on": "2020-11-22T00:00:00Z" +}` + invProject := `{ + "error": { + "code": 400, + "message": "Invalid project name", + "status": "INVALID_ARGUMENT" + } +}` + + invTopic := `{ + "error": { + "code": 400, + "message": "Invalid topic name", + "status": "INVALID_ARGUMENT" + } +}` + + urls := []string{ + "http://localhost:8080/v1/projects/ARGO/topics/topic1", + "http://localhost:8080/v1/projects/AR:GO/topics/topic1", + "http://localhost:8080/v1/projects/ARGO/topics/top,ic1", + "http://localhost:8080/v1/projects/AR,GO/topics/top:ic1", + } + + codes := []int(nil) + 
responses := []string(nil) + + for _, url := range urls { + w := httptest.NewRecorder() + req, err := http.NewRequest("GET", url, bytes.NewBuffer([]byte(""))) + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + router.HandleFunc("/v1/projects/{project}/topics/{topic}", WrapValidate(WrapMockAuthConfig(TopicListOne, cfgKafka, &brk, str, &mgr, nil))) + + if err != nil { + log.Fatal(err) + } + + router.ServeHTTP(w, req) + codes = append(codes, w.Code) + responses = append(responses, w.Body.String()) + + } + + // First request is valid so response is ok + suite.Equal(200, codes[0]) + suite.Equal(okResp, responses[0]) + + // Second request has invalid project name + suite.Equal(400, codes[1]) + suite.Equal(invProject, responses[1]) + + // Third request has invalid topic name + suite.Equal(400, codes[2]) + suite.Equal(invTopic, responses[2]) + + // Fourth request has invalid project and topic names, but project is caught first + suite.Equal(400, codes[3]) + suite.Equal(invProject, responses[3]) + +} + +func TestTopicsHandlersTestSuite(t *testing.T) { + log.SetOutput(ioutil.Discard) + suite.Run(t, new(TopicsHandlersTestSuite)) +} diff --git a/handlers/users.go b/handlers/users.go new file mode 100644 index 00000000..aeccaf3c --- /dev/null +++ b/handlers/users.go @@ -0,0 +1,528 @@ +package handlers + +import ( + "fmt" + "github.com/ARGOeu/argo-messaging/auth" + "github.com/ARGOeu/argo-messaging/projects" + "github.com/ARGOeu/argo-messaging/stores" + gorillaContext "github.com/gorilla/context" + "github.com/gorilla/mux" + log "github.com/sirupsen/logrus" + "github.com/twinj/uuid" + "io/ioutil" + "net/http" + "strconv" + "strings" + "time" +) + +// UserProfile returns a user's profile based on the provided url parameter(key) +func UserProfile(w http.ResponseWriter, r *http.Request) { + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", 
contentType, charset)) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + urlValues := r.URL.Query() + + // if the url parameter 'key' is empty or absent, end the request with an unauthorized response + if urlValues.Get("key") == "" { + err := APIErrorUnauthorized() + respondErr(w, err) + return + } + + result, err := auth.GetUserByToken(urlValues.Get("key"), refStr) + + if err != nil { + if err.Error() == "not found" { + err := APIErrorUnauthorized() + respondErr(w, err) + return + } + err := APIErrQueryDatastore() + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := result.ExportJSON() + + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + respondOK(w, []byte(resJSON)) + +} + +// RefreshToken (POST) refreshes user's token +func RefreshToken(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + urlUser := urlVars["user"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Get Result Object + userUUID := auth.GetUUIDByName(urlUser, refStr) + token, err := auth.GenToken() // generate a new user token + + res, err := auth.UpdateUserToken(userUUID, token, refStr) + + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("User") + respondErr(w, err) + return + } + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// UserUpdate (PUT) updates the user information +func 
UserUpdate(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + urlUser := urlVars["user"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Read POST JSON body + body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // Parse pull options + postBody, err := auth.GetUserFromJSON(body) + if err != nil { + err := APIErrorInvalidArgument("User") + respondErr(w, err) + return + } + + // Get Result Object + userUUID := auth.GetUUIDByName(urlUser, refStr) + modified := time.Now().UTC() + res, err := auth.UpdateUser(userUUID, postBody.FirstName, postBody.LastName, postBody.Organization, postBody.Description, + postBody.Name, postBody.Projects, postBody.Email, postBody.ServiceRoles, modified, true, refStr) + + if err != nil { + + // In case of invalid project or role in post body + + if err.Error() == "not found" { + err := APIErrorNotFound("User") + respondErr(w, err) + return + } + + if strings.HasPrefix(err.Error(), "invalid") { + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + + if strings.HasPrefix(err.Error(), "duplicate") { + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// UserCreate (POST) creates a new user inside a project +func UserCreate(w http.ResponseWriter, r *http.Request) { + + // Init output + output := 
[]byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + urlUser := urlVars["user"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refUserUUID := gorillaContext.Get(r, "auth_user_uuid").(string) + + // Read POST JSON body + body, err := ioutil.ReadAll(r.Body) + if err != nil { + err := APIErrorInvalidRequestBody() + respondErr(w, err) + return + } + + // Parse pull options + postBody, err := auth.GetUserFromJSON(body) + if err != nil { + err := APIErrorInvalidArgument("User") + respondErr(w, err) + log.Error(string(body[:])) + return + } + + uuid := uuid.NewV4().String() // generate a new uuid to attach to the new project + token, err := auth.GenToken() // generate a new user token + created := time.Now().UTC() + // Get Result Object + res, err := auth.CreateUser(uuid, urlUser, postBody.FirstName, postBody.LastName, postBody.Organization, postBody.Description, + postBody.Projects, token, postBody.Email, postBody.ServiceRoles, created, refUserUUID, refStr) + + if err != nil { + if err.Error() == "exists" { + err := APIErrorConflict("User") + respondErr(w, err) + return + } + + if strings.HasPrefix(err.Error(), "duplicate") { + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + + if strings.HasPrefix(err.Error(), "invalid") { + err := APIErrorInvalidData(err.Error()) + respondErr(w, err) + return + } + + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := res.ExportJSON() + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// UserListByToken (GET) one user by his token +func UserListByToken(w http.ResponseWriter, r *http.Request) 
{ + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + urlToken := urlVars["token"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Get Results Object + result, err := auth.GetUserByToken(urlToken, refStr) + + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("User") + respondErr(w, err) + return + } + err := APIErrQueryDatastore() + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := result.ExportJSON() + + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) + +} + +// UserListOne (GET) one user +func UserListOne(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + urlUser := urlVars["user"] + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Get Results Object + results, err := auth.FindUsers("", "", urlUser, true, refStr) + + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("User") + respondErr(w, err) + return + } + + err := APIErrQueryDatastore() + respondErr(w, err) + return + } + + res := results.One() + + // Output result to JSON + resJSON, err := res.ExportJSON() + + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// UserListByUUID (GET) one user by uuid +func UserListByUUID(w 
http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab url path variables + urlVars := mux.Vars(r) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + + // Get Results Object + result, err := auth.GetUserByUUID(urlVars["uuid"], refStr) + + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("User") + respondErr(w, err) + return + } + + if err.Error() == "multiple uuids" { + err := APIErrGenericInternal("Multiple users found with the same uuid") + respondErr(w, err) + return + } + + err := APIErrQueryDatastore() + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := result.ExportJSON() + + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// UserListAll (GET) all users - or users belonging to a project +func UserListAll(w http.ResponseWriter, r *http.Request) { + + var err error + var pageSize int + var paginatedUsers auth.PaginatedUsers + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + refRoles := gorillaContext.Get(r, "auth_roles").([]string) + usersDetailedView := false + + // Grab url path variables + urlValues := r.URL.Query() + pageToken := urlValues.Get("pageToken") + strPageSize := urlValues.Get("pageSize") + projectName := urlValues.Get("project") + details := urlValues.Get("details") + projectUUID := "" + + if details == "true" { + usersDetailedView = true + } + + if projectName != "" 
{ + projectUUID = projects.GetUUIDByName(projectName, refStr) + if projectUUID == "" { + err := APIErrorNotFound("ProjectUUID") + respondErr(w, err) + return + } + } + + if strPageSize != "" { + if pageSize, err = strconv.Atoi(strPageSize); err != nil { + log.Errorf("Pagesize %v produced an error while being converted to int: %v", strPageSize, err.Error()) + err := APIErrorInvalidData("Invalid page size") + respondErr(w, err) + return + } + } + + // check that user is indeed a service admin in order to be priviledged to see full user info + priviledged := auth.IsServiceAdmin(refRoles) + + // Get Results Object - call is always priviledged because this handler is only accessible by service admins + if paginatedUsers, err = auth.PaginatedFindUsers(pageToken, int32(pageSize), projectUUID, priviledged, usersDetailedView, refStr); err != nil { + err := APIErrorInvalidData("Invalid page token") + respondErr(w, err) + return + } + + // Output result to JSON + resJSON, err := paginatedUsers.ExportJSON() + + if err != nil { + err := APIErrExportJSON() + respondErr(w, err) + return + } + + // Write response + output = []byte(resJSON) + respondOK(w, output) +} + +// UserDelete (DEL) deletes an existing user +func UserDelete(w http.ResponseWriter, r *http.Request) { + + // Init output + output := []byte("") + + // Add content type header to the response + contentType := "application/json" + charset := "utf-8" + w.Header().Add("Content-Type", fmt.Sprintf("%s; charset=%s", contentType, charset)) + + // Grab context references + refStr := gorillaContext.Get(r, "str").(stores.Store) + // Grab url path variables + urlVars := mux.Vars(r) + urlUser := urlVars["user"] + + userUUID := auth.GetUUIDByName(urlUser, refStr) + + err := auth.RemoveUser(userUUID, refStr) + if err != nil { + if err.Error() == "not found" { + err := APIErrorNotFound("User") + respondErr(w, err) + return + } + err := APIErrGenericInternal(err.Error()) + respondErr(w, err) + return + } + + // Write empty response 
if anything ok + respondOK(w, output) +} diff --git a/handlers/users_test.go b/handlers/users_test.go new file mode 100644 index 00000000..b28e6558 --- /dev/null +++ b/handlers/users_test.go @@ -0,0 +1,1608 @@ +package handlers + +import ( + "bytes" + "github.com/ARGOeu/argo-messaging/auth" + "github.com/ARGOeu/argo-messaging/brokers" + "github.com/ARGOeu/argo-messaging/config" + oldPush "github.com/ARGOeu/argo-messaging/push" + "github.com/ARGOeu/argo-messaging/stores" + "github.com/gorilla/mux" + log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/suite" + "io/ioutil" + "net/http" + "net/http/httptest" + "testing" +) + +type UsersHandlersTestSuite struct { + suite.Suite + cfgStr string +} + +func (suite *UsersHandlersTestSuite) SetupTest() { + suite.cfgStr = `{ + "bind_ip":"", + "port":8080, + "zookeeper_hosts":["localhost"], + "kafka_znode":"", + "store_host":"localhost", + "store_db":"argo_msg", + "certificate":"/etc/pki/tls/certs/localhost.crt", + "certificate_key":"/etc/pki/tls/private/localhost.key", + "per_resource_auth":"true", + "push_enabled": "true", + "push_worker_token": "push_token" + }` +} + +func (suite *UsersHandlersTestSuite) TestUserProfile() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users/profile?key=S3CR3T1", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "uuid": "uuid1", + "projects": [ + { + "project": "ARGO", + "roles": [ + "consumer", + "publisher" + ], + "topics": [ + "topic1", + "topic2" + ], + "subscriptions": [ + "sub1", + "sub2", + "sub3" + ] + } + ], + "name": "UserA", + "first_name": "FirstA", + "last_name": "LastA", + "organization": "OrgA", + "description": "DescA", + "token": "S3CR3T1", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + 
router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users/profile", WrapMockAuthConfig(UserProfile, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *UsersHandlersTestSuite) TestUserProfileUnauthorized() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users/profile?key=unknonwn", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 401, + "message": "Unauthorized", + "status": "UNAUTHORIZED" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + + // unknown key + router.HandleFunc("/v1/users/profile", WrapMockAuthConfig(UserProfile, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(401, w.Code) + suite.Equal(expResp, w.Body.String()) + + // empty key + w2 := httptest.NewRecorder() + req2, err2 := http.NewRequest("GET", "http://localhost:8080/v1/users/profile", nil) + if err2 != nil { + log.Fatal(err2) + } + router.HandleFunc("/v1/users/profile", WrapMockAuthConfig(UserProfile, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w2, req2) + suite.Equal(401, w2.Code) + suite.Equal(expResp, w2.Body.String()) + +} + +func (suite *UsersHandlersTestSuite) TestUserCreate() { + + postJSON := `{ + "email":"email@foo.com", + "first_name": "fname-1", + "last_name": "lname-1", + "organization": "org-1", + "description": "desc-1", + "projects":[{"project_uuid":"argo_uuid","roles":["admin","viewer"]}] +}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/users/USERNEW", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + 
cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserCreate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + usrOut, _ := auth.GetUserFromJSON([]byte(w.Body.String())) + + suite.Equal("USERNEW", usrOut.Name) + // Check if the mock authenticated userA has been marked as the creator + suite.Equal("email@foo.com", usrOut.Email) + //suite.Equal([]string{"admin", "viewer"}, usrOut.Projects[0].Role) + suite.Equal("fname-1", usrOut.FirstName) + suite.Equal("lname-1", usrOut.LastName) + suite.Equal("org-1", usrOut.Organization) + suite.Equal("desc-1", usrOut.Description) +} + +func (suite *UsersHandlersTestSuite) TestUserCreateDuplicateRef() { + + postJSON := `{ + "email":"email@foo.com", + "projects":[{"project":"ARGO","roles":["admin","viewer"]},{"project":"ARGO","roles":["admin","viewer"]}] +}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/users/USERNEW", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "error": { + "code": 400, + "message": "duplicate reference of project ARGO", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserCreate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expJSON, w.Body.String()) +} + +func (suite *UsersHandlersTestSuite) TestUserCreateInvalidServiceRole() { + + postJSON := `{ + "email":"email@foo.com", + 
"projects":[{"project":"ARGO","roles":["admin","viewer"]}], + "service_roles": ["unknown"] +}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/users/USERNEW", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "error": { + "code": 400, + "message": "invalid role: unknown", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserCreate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expJSON, w.Body.String()) +} + +func (suite *UsersHandlersTestSuite) TestUserCreateInvalidProjectName() { + + postJSON := `{ + "email":"email@foo.com", + "projects":[{"project":"unknown","roles":["admin","viewer"]},{"project":"ARGO","roles":["admin","viewer"]}] +}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/users/USERNEW", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "error": { + "code": 400, + "message": "invalid project: unknown", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserCreate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expJSON, w.Body.String()) +} + +func (suite *UsersHandlersTestSuite) TestUserCreateInvalidRoles() { + + postJSON := `{ + "email":"email@foo.com", + 
"projects":[{"project":"ARGO2","roles":["unknown","viewer"]},{"project":"ARGO","roles":["admin","viewer"]}] +}` + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/users/USERNEW", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "error": { + "code": 400, + "message": "invalid role: unknown", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserCreate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expJSON, w.Body.String()) +} + +func (suite *UsersHandlersTestSuite) TestRefreshToken() { + + req, err := http.NewRequest("POST", "http://localhost:8080/v1/users/UserZ:refreshToken", nil) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/users/{user}:refreshToken", WrapMockAuthConfig(RefreshToken, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + userOut, _ := auth.GetUserFromJSON([]byte(w.Body.String())) + suite.NotEqual("S3CR3T", userOut.Token) +} + +func (suite *UsersHandlersTestSuite) TestUserUpdate() { + + postJSON := `{ + "name":"UPDATED_NAME", + "service_roles":["service_admin"] +}` + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/users/UserZ", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := 
stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserUpdate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + userOut, _ := auth.GetUserFromJSON([]byte(w.Body.String())) + suite.Equal("UPDATED_NAME", userOut.Name) + suite.Equal([]string{"service_admin"}, userOut.ServiceRoles) + suite.Equal("UserA", userOut.CreatedBy) + +} + +func (suite *UsersHandlersTestSuite) TestUserUpdateInvalidProjectName() { + + postJSON := `{ + "name":"UPDATED_NAME", + "projects": [{"project": "unknown"}], + "service_roles":["service_admin"] +}` + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/users/UserZ", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "error": { + "code": 400, + "message": "invalid project: unknown", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserUpdate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expJSON, w.Body.String()) +} + +func (suite *UsersHandlersTestSuite) TestUserUpdateInvalidRoles() { + + postJSON := `{ + "name":"UPDATED_NAME", + "projects": [{"project": "ARGO2", "roles": ["unknown"]}], + "service_roles":["service_admin"] +}` + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/users/UserZ", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "error": { + "code": 400, + "message": "invalid role: unknown", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + 
cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserUpdate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expJSON, w.Body.String()) +} + +func (suite *UsersHandlersTestSuite) TestUserUpdateInvalidServiceRoles() { + + postJSON := `{ + "name":"UPDATED_NAME", + "projects": [{"project": "ARGO2", "roles": ["consumer"]}], + "service_roles":["unknown"] +}` + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/users/UserZ", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + expJSON := `{ + "error": { + "code": 400, + "message": "invalid role: unknown", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserUpdate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expJSON, w.Body.String()) +} + +func (suite *UsersHandlersTestSuite) TestUserUpdateDuplicate() { + postJSON := `{ + "email":"email@foo.com", + "projects":[{"project":"ARGO","roles":["admin","viewer"]},{"project":"ARGO2","roles":["admin","viewer"]},{"project":"ARGO2","roles":["admin","viewer"]}] + }` + + expJSON := `{ + "error": { + "code": 400, + "message": "duplicate reference of project ARGO2", + "status": "INVALID_ARGUMENT" + } +}` + + req, err := http.NewRequest("PUT", "http://localhost:8080/v1/users/UserZ", bytes.NewBuffer([]byte(postJSON))) + if err != nil { + log.Fatal(err) + } + + cfgKafka := config.NewAPICfg() + 
cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + mgr := oldPush.Manager{} + w := httptest.NewRecorder() + router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserUpdate, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expJSON, w.Body.String()) + +} + +func (suite *UsersHandlersTestSuite) TestUserListByToken() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users:byToken/S3CR3T1", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "uuid": "uuid1", + "projects": [ + { + "project": "ARGO", + "roles": [ + "consumer", + "publisher" + ], + "topics": [ + "topic1", + "topic2" + ], + "subscriptions": [ + "sub1", + "sub2", + "sub3" + ] + } + ], + "name": "UserA", + "first_name": "FirstA", + "last_name": "LastA", + "organization": "OrgA", + "description": "DescA", + "token": "S3CR3T1", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users:byToken/{token}", WrapMockAuthConfig(UserListByToken, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *UsersHandlersTestSuite) TestUserListByUUID() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users:byUUID/uuid4", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "uuid": "uuid4", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [ + "topic2" + ], + "subscriptions": [ + "sub3", + "sub4" + ] + } + 
], + "name": "UserZ", + "token": "S3CR3T4", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users:byUUID/{uuid}", WrapMockAuthConfig(UserListByUUID, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *UsersHandlersTestSuite) TestUserListByUUIDNotFound() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users:byUUID/uuid10", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 404, + "message": "User doesn't exist", + "status": "NOT_FOUND" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users:byUUID/{uuid}", WrapMockAuthConfig(UserListByUUID, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(404, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *UsersHandlersTestSuite) TestUserListByUUIDConflict() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users:byUUID/same_uuid", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 500, + "message": "Multiple users found with the same uuid", + "status": "INTERNAL_SERVER_ERROR" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := 
mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users:byUUID/{uuid}", WrapMockAuthConfig(UserListByUUID, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(500, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *UsersHandlersTestSuite) TestUserListOne() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users/UserA", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "uuid": "uuid1", + "projects": [ + { + "project": "ARGO", + "roles": [ + "consumer", + "publisher" + ], + "topics": [ + "topic1", + "topic2" + ], + "subscriptions": [ + "sub1", + "sub2", + "sub3" + ] + } + ], + "name": "UserA", + "first_name": "FirstA", + "last_name": "LastA", + "organization": "OrgA", + "description": "DescA", + "token": "S3CR3T1", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserListOne, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *UsersHandlersTestSuite) TestUserListAll() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?details=true", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "users": [ + { + "uuid": "uuid8", + "projects": [ + { + "project": "ARGO2", + "roles": [ + "consumer", + "publisher" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "UserZ", + "token": "S3CR3T1", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": 
"2009-11-10T23:00:00Z" + }, + { + "uuid": "uuid7", + "name": "push_worker_0", + "token": "push_token", + "email": "foo-email", + "service_roles": [ + "push_worker" + ], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + }, + { + "uuid": "same_uuid", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "UserSame2", + "token": "S3CR3T42", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + }, + { + "uuid": "same_uuid", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "UserSame1", + "token": "S3CR3T41", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + }, + { + "uuid": "uuid4", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [ + "topic2" + ], + "subscriptions": [ + "sub3", + "sub4" + ] + } + ], + "name": "UserZ", + "token": "S3CR3T4", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + }, + { + "uuid": "uuid3", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [ + "topic3" + ], + "subscriptions": [ + "sub2" + ] + } + ], + "name": "UserX", + "token": "S3CR3T3", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + }, + { + "uuid": "uuid2", + "projects": [ + { + "project": "ARGO", + "roles": [ + "consumer", + "publisher" + ], + "topics": [ + "topic1", + "topic2" + ], + "subscriptions": [ + "sub1", + "sub3", + "sub4" + ] + } + ], + "name": "UserB", + "token": 
"S3CR3T2", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + }, + { + "uuid": "uuid1", + "projects": [ + { + "project": "ARGO", + "roles": [ + "consumer", + "publisher" + ], + "topics": [ + "topic1", + "topic2" + ], + "subscriptions": [ + "sub1", + "sub2", + "sub3" + ] + } + ], + "name": "UserA", + "first_name": "FirstA", + "last_name": "LastA", + "organization": "OrgA", + "description": "DescA", + "token": "S3CR3T1", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + }, + { + "uuid": "uuid0", + "projects": [ + { + "project": "ARGO", + "roles": [ + "consumer", + "publisher" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "Test", + "token": "S3CR3T", + "email": "Test@test.com", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + } + ], + "nextPageToken": "", + "totalSize": 9 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil, "service_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *UsersHandlersTestSuite) TestUserListAllStartingPage() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?pageSize=2&details=true", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "users": [ + { + "uuid": "uuid8", + "projects": [ + { + "project": "ARGO2", + "roles": [ + "consumer", + "publisher" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "UserZ", + "token": "S3CR3T1", + "email": "foo-email", + "service_roles": 
[], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + }, + { + "uuid": "uuid7", + "name": "push_worker_0", + "token": "push_token", + "email": "foo-email", + "service_roles": [ + "push_worker" + ], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + } + ], + "nextPageToken": "Ng==", + "totalSize": 2 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil, "service_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *UsersHandlersTestSuite) TestUserListAllProjectARGO() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?project=ARGO&details=true", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "users": [ + { + "uuid": "same_uuid", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "UserSame2", + "token": "S3CR3T42", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + }, + { + "uuid": "same_uuid", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "UserSame1", + "token": "S3CR3T41", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + }, + { + "uuid": "uuid4", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [ + "topic2" + ], + "subscriptions": [ + "sub3", + "sub4" + ] + 
} + ], + "name": "UserZ", + "token": "S3CR3T4", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + }, + { + "uuid": "uuid3", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [ + "topic3" + ], + "subscriptions": [ + "sub2" + ] + } + ], + "name": "UserX", + "token": "S3CR3T3", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + }, + { + "uuid": "uuid2", + "projects": [ + { + "project": "ARGO", + "roles": [ + "consumer", + "publisher" + ], + "topics": [ + "topic1", + "topic2" + ], + "subscriptions": [ + "sub1", + "sub3", + "sub4" + ] + } + ], + "name": "UserB", + "token": "S3CR3T2", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + }, + { + "uuid": "uuid1", + "projects": [ + { + "project": "ARGO", + "roles": [ + "consumer", + "publisher" + ], + "topics": [ + "topic1", + "topic2" + ], + "subscriptions": [ + "sub1", + "sub2", + "sub3" + ] + } + ], + "name": "UserA", + "first_name": "FirstA", + "last_name": "LastA", + "organization": "OrgA", + "description": "DescA", + "token": "S3CR3T1", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + }, + { + "uuid": "uuid0", + "projects": [ + { + "project": "ARGO", + "roles": [ + "consumer", + "publisher" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "Test", + "token": "S3CR3T", + "email": "Test@test.com", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + } + ], + "nextPageToken": "", + "totalSize": 7 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := 
stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil, "service_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *UsersHandlersTestSuite) TestUserListAllProjectARGO2() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?project=ARGO2&details=true", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "users": [ + { + "uuid": "uuid8", + "projects": [ + { + "project": "ARGO2", + "roles": [ + "consumer", + "publisher" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "UserZ", + "token": "S3CR3T1", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + } + ], + "nextPageToken": "", + "totalSize": 1 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil, "service_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *UsersHandlersTestSuite) TestUserListAllProjectUNKNOWN() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?project=UNKNOWN", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 404, + "message": "ProjectUUID doesn't exist", + "status": "NOT_FOUND" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := 
httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil, "service_admin")) + router.ServeHTTP(w, req) + suite.Equal(404, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *UsersHandlersTestSuite) TestUserListAllStartingAtSecond() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?pageSize=2&pageToken=Nw==&details=true", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "users": [ + { + "uuid": "uuid7", + "name": "push_worker_0", + "token": "push_token", + "email": "foo-email", + "service_roles": [ + "push_worker" + ], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + }, + { + "uuid": "same_uuid", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [], + "subscriptions": [] + } + ], + "name": "UserSame2", + "token": "S3CR3T42", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + } + ], + "nextPageToken": "NQ==", + "totalSize": 2 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil, "service_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *UsersHandlersTestSuite) TestUserListAllStartingAtSecondNoUserDetails() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?pageSize=2&pageToken=Nw==&details=false", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "users": [ + { + "uuid": "uuid7", + "name": "push_worker_0", + "token": "push_token", + 
"email": "foo-email", + "service_roles": [ + "push_worker" + ], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z" + }, + { + "uuid": "same_uuid", + "name": "UserSame2", + "token": "S3CR3T42", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + } + ], + "nextPageToken": "NQ==", + "totalSize": 2 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil, "service_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) +} + +func (suite *UsersHandlersTestSuite) TestUserListAllEmptyCollection() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "users": [], + "nextPageToken": "", + "totalSize": 0 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + // empty the store + str.UserList = []stores.QUser{} + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *UsersHandlersTestSuite) TestUserListAllIntermediatePage() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?pageToken=NA==&pageSize=2&details=true", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "users": [ + { + "uuid": "uuid4", + "projects": [ + { + 
"project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [ + "topic2" + ], + "subscriptions": [ + "sub3", + "sub4" + ] + } + ], + "name": "UserZ", + "token": "S3CR3T4", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + }, + { + "uuid": "uuid3", + "projects": [ + { + "project": "ARGO", + "roles": [ + "publisher", + "consumer" + ], + "topics": [ + "topic3" + ], + "subscriptions": [ + "sub2" + ] + } + ], + "name": "UserX", + "token": "S3CR3T3", + "email": "foo-email", + "service_roles": [], + "created_on": "2009-11-10T23:00:00Z", + "modified_on": "2009-11-10T23:00:00Z", + "created_by": "UserA" + } + ], + "nextPageToken": "Mg==", + "totalSize": 2 +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil, "service_admin")) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *UsersHandlersTestSuite) TestUserListAllInvalidPageSize() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?pageSize=invalid", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 400, + "message": "Invalid page size", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + 
suite.Equal(expResp, w.Body.String()) + +} + +func (suite *UsersHandlersTestSuite) TestUserListAllInvalidPageToken() { + + req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?pageToken=invalid", nil) + if err != nil { + log.Fatal(err) + } + + expResp := `{ + "error": { + "code": 400, + "message": "Invalid page token", + "status": "INVALID_ARGUMENT" + } +}` + + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(400, w.Code) + suite.Equal(expResp, w.Body.String()) + +} + +func (suite *UsersHandlersTestSuite) TestUserDelete() { + + req, err := http.NewRequest("DELETE", "http://localhost:8080/v1/users/UserA", nil) + + if err != nil { + log.Fatal(err) + } + + expResp := "" + cfgKafka := config.NewAPICfg() + cfgKafka.LoadStrJSON(suite.cfgStr) + brk := brokers.MockBroker{} + str := stores.NewMockStore("whatever", "argo_mgs") + router := mux.NewRouter().StrictSlash(true) + w := httptest.NewRecorder() + mgr := oldPush.Manager{} + router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserDelete, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(200, w.Code) + suite.Equal(expResp, w.Body.String()) + + // Search the deleted user + + req, err = http.NewRequest("GET", "http://localhost:8080/v1/users/UserA", nil) + if err != nil { + log.Fatal(err) + } + + expResp2 := `{ + "error": { + "code": 404, + "message": "User doesn't exist", + "status": "NOT_FOUND" + } +}` + + router = mux.NewRouter().StrictSlash(true) + w = httptest.NewRecorder() + router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserListOne, cfgKafka, &brk, str, &mgr, nil)) + router.ServeHTTP(w, req) + suite.Equal(404, w.Code) + 
suite.Equal(expResp2, w.Body.String()) + +} + +func TestUsersHandlersTestSuite(t *testing.T) { + log.SetOutput(ioutil.Discard) + suite.Run(t, new(UsersHandlersTestSuite)) +} diff --git a/handlers_test.go b/handlers_test.go deleted file mode 100644 index 8cc9a161..00000000 --- a/handlers_test.go +++ /dev/null @@ -1,8036 +0,0 @@ -package main - -import ( - "bytes" - "io/ioutil" - "net/http" - "net/http/httptest" - "strconv" - "strings" - "testing" - "time" - - log "github.com/sirupsen/logrus" - - "encoding/json" - "fmt" - "github.com/ARGOeu/argo-messaging/auth" - "github.com/ARGOeu/argo-messaging/brokers" - "github.com/ARGOeu/argo-messaging/config" - "github.com/ARGOeu/argo-messaging/metrics" - "github.com/ARGOeu/argo-messaging/projects" - oldPush "github.com/ARGOeu/argo-messaging/push" - push "github.com/ARGOeu/argo-messaging/push/grpc/client" - "github.com/ARGOeu/argo-messaging/schemas" - "github.com/ARGOeu/argo-messaging/stores" - "github.com/gorilla/mux" - "github.com/stretchr/testify/suite" -) - -type HandlerTestSuite struct { - suite.Suite - cfgStr string -} - -func (suite *HandlerTestSuite) SetupTest() { - suite.cfgStr = `{ - "bind_ip":"", - "port":8080, - "zookeeper_hosts":["localhost"], - "kafka_znode":"", - "store_host":"localhost", - "store_db":"argo_msg", - "certificate":"/etc/pki/tls/certs/localhost.crt", - "certificate_key":"/etc/pki/tls/private/localhost.key", - "per_resource_auth":"true", - "push_enabled": "true", - "push_worker_token": "push_token" - }` - - log.SetOutput(ioutil.Discard) -} - -func (suite *HandlerTestSuite) TestValidHTTPS() { - suite.Equal(false, isValidHTTPS("ht")) - suite.Equal(false, isValidHTTPS("www.example.com")) - suite.Equal(false, isValidHTTPS("https:www.example.com")) - suite.Equal(false, isValidHTTPS("http://www.example.com")) - suite.Equal(true, isValidHTTPS("https://www.example.com")) - -} - -func (suite *HandlerTestSuite) TestValidation() { - // nameValidations - suite.Equal(true, validName("topic101")) - 
suite.Equal(true, validName("topic_101")) - suite.Equal(true, validName("topic_101_another_thing")) - suite.Equal(true, validName("topic___343_random")) - suite.Equal(true, validName("topic_dc1cc538-1361-4317-a235-0bf383d4a69f")) - suite.Equal(false, validName("topic_dc1cc538.1361-4317-a235-0bf383d4a69f")) - suite.Equal(false, validName("topic.not.valid")) - suite.Equal(false, validName("spaces are not valid")) - suite.Equal(false, validName("topic/A")) - suite.Equal(false, validName("topic/B")) - - // ackID validations - suite.Equal(true, validAckID("ARGO", "sub101", "projects/ARGO/subscriptions/sub101:5")) - suite.Equal(false, validAckID("ARGO", "sub101", "projects/ARGO/subscriptions/sub101:aaa")) - suite.Equal(false, validAckID("ARGO", "sub101", "projects/FARGO/subscriptions/sub101:5")) - suite.Equal(false, validAckID("ARGO", "sub101", "projects/ARGO/subscriptions/subF00:5")) - suite.Equal(false, validAckID("ARGO", "sub101", "falsepath/ARGO/subscriptions/sub101:5")) - suite.Equal(true, validAckID("FOO", "BAR", "projects/FOO/subscriptions/BAR:11155")) - suite.Equal(false, validAckID("FOO", "BAR", "projects/FOO//subscriptions/BAR:11155")) - -} - -func (suite *HandlerTestSuite) TestUserProfile() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users/profile?key=S3CR3T1", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "uuid": "uuid1", - "projects": [ - { - "project": "ARGO", - "roles": [ - "consumer", - "publisher" - ], - "topics": [ - "topic1", - "topic2" - ], - "subscriptions": [ - "sub1", - "sub2", - "sub3" - ] - } - ], - "name": "UserA", - "first_name": "FirstA", - "last_name": "LastA", - "organization": "OrgA", - "description": "DescA", - "token": "S3CR3T1", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := 
stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users/profile", WrapMockAuthConfig(UserProfile, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestUserProfileUnauthorized() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users/profile?key=unknonwn", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 401, - "message": "Unauthorized", - "status": "UNAUTHORIZED" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - - // unknown key - router.HandleFunc("/v1/users/profile", WrapMockAuthConfig(UserProfile, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(401, w.Code) - suite.Equal(expResp, w.Body.String()) - - // empty key - w2 := httptest.NewRecorder() - req2, err2 := http.NewRequest("GET", "http://localhost:8080/v1/users/profile", nil) - if err2 != nil { - log.Fatal(err2) - } - router.HandleFunc("/v1/users/profile", WrapMockAuthConfig(UserProfile, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w2, req2) - suite.Equal(401, w2.Code) - suite.Equal(expResp, w2.Body.String()) - -} - -func (suite *HandlerTestSuite) TestUserCreate() { - - postJSON := `{ - "email":"email@foo.com", - "first_name": "fname-1", - "last_name": "lname-1", - "organization": "org-1", - "description": "desc-1", - "projects":[{"project_uuid":"argo_uuid","roles":["admin","viewer"]}] -}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/users/USERNEW", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() 
- cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserCreate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - usrOut, _ := auth.GetUserFromJSON([]byte(w.Body.String())) - - suite.Equal("USERNEW", usrOut.Name) - // Check if the mock authenticated userA has been marked as the creator - suite.Equal("email@foo.com", usrOut.Email) - //suite.Equal([]string{"admin", "viewer"}, usrOut.Projects[0].Role) - suite.Equal("fname-1", usrOut.FirstName) - suite.Equal("lname-1", usrOut.LastName) - suite.Equal("org-1", usrOut.Organization) - suite.Equal("desc-1", usrOut.Description) -} - -func (suite *HandlerTestSuite) TestUserCreateDuplicateRef() { - - postJSON := `{ - "email":"email@foo.com", - "projects":[{"project":"ARGO","roles":["admin","viewer"]},{"project":"ARGO","roles":["admin","viewer"]}] -}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/users/USERNEW", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "error": { - "code": 400, - "message": "duplicate reference of project ARGO", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserCreate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expJSON, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestUserCreateInvalidServiceRole() { - - postJSON := `{ - "email":"email@foo.com", - "projects":[{"project":"ARGO","roles":["admin","viewer"]}], 
- "service_roles": ["unknown"] -}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/users/USERNEW", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "error": { - "code": 400, - "message": "invalid role: unknown", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserCreate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expJSON, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestUserCreateInvalidProjectName() { - - postJSON := `{ - "email":"email@foo.com", - "projects":[{"project":"unknown","roles":["admin","viewer"]},{"project":"ARGO","roles":["admin","viewer"]}] -}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/users/USERNEW", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "error": { - "code": 400, - "message": "invalid project: unknown", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserCreate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expJSON, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestUserCreateInvalidRoles() { - - postJSON := `{ - "email":"email@foo.com", - "projects":[{"project":"ARGO2","roles":["unknown","viewer"]},{"project":"ARGO","roles":["admin","viewer"]}] -}` - - req, err := 
http.NewRequest("POST", "http://localhost:8080/v1/users/USERNEW", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "error": { - "code": 400, - "message": "invalid role: unknown", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserCreate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expJSON, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestRefreshToken() { - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/users/UserZ:refreshToken", nil) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/users/{user}:refreshToken", WrapMockAuthConfig(RefreshToken, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - userOut, _ := auth.GetUserFromJSON([]byte(w.Body.String())) - suite.NotEqual("S3CR3T", userOut.Token) -} - -func (suite *HandlerTestSuite) TestUserUpdate() { - - postJSON := `{ - "name":"UPDATED_NAME", - "service_roles":["service_admin"] -}` - - req, err := http.NewRequest("PUT", "http://localhost:8080/v1/users/UserZ", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() 
- router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserUpdate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - userOut, _ := auth.GetUserFromJSON([]byte(w.Body.String())) - suite.Equal("UPDATED_NAME", userOut.Name) - suite.Equal([]string{"service_admin"}, userOut.ServiceRoles) - suite.Equal("UserA", userOut.CreatedBy) - -} - -func (suite *HandlerTestSuite) TestUserUpdateInvalidProjectName() { - - postJSON := `{ - "name":"UPDATED_NAME", - "projects": [{"project": "unknown"}], - "service_roles":["service_admin"] -}` - - req, err := http.NewRequest("PUT", "http://localhost:8080/v1/users/UserZ", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "error": { - "code": 400, - "message": "invalid project: unknown", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserUpdate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expJSON, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestUserUpdateInvalidRoles() { - - postJSON := `{ - "name":"UPDATED_NAME", - "projects": [{"project": "ARGO2", "roles": ["unknown"]}], - "service_roles":["service_admin"] -}` - - req, err := http.NewRequest("PUT", "http://localhost:8080/v1/users/UserZ", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "error": { - "code": 400, - "message": "invalid role: unknown", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr 
:= oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserUpdate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expJSON, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestUserUpdateInvalidServiceRoles() { - - postJSON := `{ - "name":"UPDATED_NAME", - "projects": [{"project": "ARGO2", "roles": ["consumer"]}], - "service_roles":["unknown"] -}` - - req, err := http.NewRequest("PUT", "http://localhost:8080/v1/users/UserZ", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "error": { - "code": 400, - "message": "invalid role: unknown", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserUpdate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expJSON, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestUserUpdateDuplicate() { - postJSON := `{ - "email":"email@foo.com", - "projects":[{"project":"ARGO","roles":["admin","viewer"]},{"project":"ARGO2","roles":["admin","viewer"]},{"project":"ARGO2","roles":["admin","viewer"]}] - }` - - expJSON := `{ - "error": { - "code": 400, - "message": "duplicate reference of project ARGO2", - "status": "INVALID_ARGUMENT" - } -}` - - req, err := http.NewRequest("PUT", "http://localhost:8080/v1/users/UserZ", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := 
httptest.NewRecorder() - router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserUpdate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expJSON, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestUserListByToken() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users:byToken/S3CR3T1", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "uuid": "uuid1", - "projects": [ - { - "project": "ARGO", - "roles": [ - "consumer", - "publisher" - ], - "topics": [ - "topic1", - "topic2" - ], - "subscriptions": [ - "sub1", - "sub2", - "sub3" - ] - } - ], - "name": "UserA", - "first_name": "FirstA", - "last_name": "LastA", - "organization": "OrgA", - "description": "DescA", - "token": "S3CR3T1", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users:byToken/{token}", WrapMockAuthConfig(UserListByToken, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestUserListByUUID() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users:byUUID/uuid4", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "uuid": "uuid4", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [ - "topic2" - ], - "subscriptions": [ - "sub3", - "sub4" - ] - } - ], - "name": "UserZ", - "token": "S3CR3T4", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" -}` - - 
cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users:byUUID/{uuid}", WrapMockAuthConfig(UserListByUUID, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestUserListByUUIDNotFound() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users:byUUID/uuid10", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 404, - "message": "User doesn't exist", - "status": "NOT_FOUND" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users:byUUID/{uuid}", WrapMockAuthConfig(UserListByUUID, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(404, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestUserListByUUIDConflict() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users:byUUID/same_uuid", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 500, - "message": "Multiple users found with the same uuid", - "status": "INTERNAL_SERVER_ERROR" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users:byUUID/{uuid}", WrapMockAuthConfig(UserListByUUID, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) 
- suite.Equal(500, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestProjectUserListOne() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/members/UserZ", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "uuid": "uuid4", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [ - "topic2" - ], - "subscriptions": [ - "sub3", - "sub4" - ] - } - ], - "name": "UserZ", - "token": "S3CR3T4", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/members/{user}", WrapMockAuthConfig(ProjectUserListOne, cfgKafka, &brk, str, &mgr, nil, "service_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestProjectUserListOneUnpriv() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/members/UserZ", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "uuid": "uuid4", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [ - "topic2" - ], - "subscriptions": [ - "sub3", - "sub4" - ] - } - ], - "name": "UserZ", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := 
oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/members/{user}", WrapMockAuthConfig(ProjectUserListOne, cfgKafka, &brk, str, &mgr, nil, "project_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestUserListOne() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users/UserA", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "uuid": "uuid1", - "projects": [ - { - "project": "ARGO", - "roles": [ - "consumer", - "publisher" - ], - "topics": [ - "topic1", - "topic2" - ], - "subscriptions": [ - "sub1", - "sub2", - "sub3" - ] - } - ], - "name": "UserA", - "first_name": "FirstA", - "last_name": "LastA", - "organization": "OrgA", - "description": "DescA", - "token": "S3CR3T1", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserListOne, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestUserListAll() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "users": [ - { - "uuid": "uuid8", - "projects": [ - { - "project": "ARGO2", - "roles": [ - "consumer", - "publisher" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "UserZ", - "token": "S3CR3T1", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - }, - { - "uuid": "uuid7", - "projects": [], - "name": 
"push_worker_0", - "token": "push_token", - "email": "foo-email", - "service_roles": [ - "push_worker" - ], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - }, - { - "uuid": "same_uuid", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "UserSame2", - "token": "S3CR3T42", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" - }, - { - "uuid": "same_uuid", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "UserSame1", - "token": "S3CR3T41", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" - }, - { - "uuid": "uuid4", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [ - "topic2" - ], - "subscriptions": [ - "sub3", - "sub4" - ] - } - ], - "name": "UserZ", - "token": "S3CR3T4", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" - }, - { - "uuid": "uuid3", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [ - "topic3" - ], - "subscriptions": [ - "sub2" - ] - } - ], - "name": "UserX", - "token": "S3CR3T3", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" - }, - { - "uuid": "uuid2", - "projects": [ - { - "project": "ARGO", - "roles": [ - "consumer", - "publisher" - ], - "topics": [ - "topic1", - "topic2" - ], - "subscriptions": [ - "sub1", - "sub3", - "sub4" - ] - } - ], - "name": "UserB", - "token": "S3CR3T2", - "email": "foo-email", - "service_roles": [], - 
"created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" - }, - { - "uuid": "uuid1", - "projects": [ - { - "project": "ARGO", - "roles": [ - "consumer", - "publisher" - ], - "topics": [ - "topic1", - "topic2" - ], - "subscriptions": [ - "sub1", - "sub2", - "sub3" - ] - } - ], - "name": "UserA", - "first_name": "FirstA", - "last_name": "LastA", - "organization": "OrgA", - "description": "DescA", - "token": "S3CR3T1", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - }, - { - "uuid": "uuid0", - "projects": [ - { - "project": "ARGO", - "roles": [ - "consumer", - "publisher" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "Test", - "token": "S3CR3T", - "email": "Test@test.com", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - } - ], - "nextPageToken": "", - "totalSize": 9 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil, "service_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestUserListAllStartingPage() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?pageSize=2", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "users": [ - { - "uuid": "uuid8", - "projects": [ - { - "project": "ARGO2", - "roles": [ - "consumer", - "publisher" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "UserZ", - "token": "S3CR3T1", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": 
"2009-11-10T23:00:00Z" - }, - { - "uuid": "uuid7", - "projects": [], - "name": "push_worker_0", - "token": "push_token", - "email": "foo-email", - "service_roles": [ - "push_worker" - ], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - } - ], - "nextPageToken": "Ng==", - "totalSize": 9 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil, "service_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestUserListAllProjectARGO() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?project=ARGO", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "users": [ - { - "uuid": "same_uuid", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "UserSame2", - "token": "S3CR3T42", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" - }, - { - "uuid": "same_uuid", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "UserSame1", - "token": "S3CR3T41", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" - }, - { - "uuid": "uuid4", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [ - "topic2" - ], - "subscriptions": [ - "sub3", - "sub4" - ] - } - ], - "name": "UserZ", - "token": "S3CR3T4", - "email": 
"foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" - }, - { - "uuid": "uuid3", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [ - "topic3" - ], - "subscriptions": [ - "sub2" - ] - } - ], - "name": "UserX", - "token": "S3CR3T3", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" - }, - { - "uuid": "uuid2", - "projects": [ - { - "project": "ARGO", - "roles": [ - "consumer", - "publisher" - ], - "topics": [ - "topic1", - "topic2" - ], - "subscriptions": [ - "sub1", - "sub3", - "sub4" - ] - } - ], - "name": "UserB", - "token": "S3CR3T2", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" - }, - { - "uuid": "uuid1", - "projects": [ - { - "project": "ARGO", - "roles": [ - "consumer", - "publisher" - ], - "topics": [ - "topic1", - "topic2" - ], - "subscriptions": [ - "sub1", - "sub2", - "sub3" - ] - } - ], - "name": "UserA", - "first_name": "FirstA", - "last_name": "LastA", - "organization": "OrgA", - "description": "DescA", - "token": "S3CR3T1", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - }, - { - "uuid": "uuid0", - "projects": [ - { - "project": "ARGO", - "roles": [ - "consumer", - "publisher" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "Test", - "token": "S3CR3T", - "email": "Test@test.com", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - } - ], - "nextPageToken": "", - "totalSize": 7 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := 
mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil, "service_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestProjectUserListARGO() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/users", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "users": [ - { - "uuid": "same_uuid", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "UserSame2", - "token": "S3CR3T42", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" - }, - { - "uuid": "same_uuid", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "UserSame1", - "token": "S3CR3T41", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" - }, - { - "uuid": "uuid4", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [ - "topic2" - ], - "subscriptions": [ - "sub3", - "sub4" - ] - } - ], - "name": "UserZ", - "token": "S3CR3T4", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" - }, - { - "uuid": "uuid3", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [ - "topic3" - ], - "subscriptions": [ - "sub2" - ] - } - ], - "name": "UserX", - "token": "S3CR3T3", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", 
- "created_by": "UserA" - }, - { - "uuid": "uuid2", - "projects": [ - { - "project": "ARGO", - "roles": [ - "consumer", - "publisher" - ], - "topics": [ - "topic1", - "topic2" - ], - "subscriptions": [ - "sub1", - "sub3", - "sub4" - ] - } - ], - "name": "UserB", - "token": "S3CR3T2", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" - }, - { - "uuid": "uuid1", - "projects": [ - { - "project": "ARGO", - "roles": [ - "consumer", - "publisher" - ], - "topics": [ - "topic1", - "topic2" - ], - "subscriptions": [ - "sub1", - "sub2", - "sub3" - ] - } - ], - "name": "UserA", - "first_name": "FirstA", - "last_name": "LastA", - "organization": "OrgA", - "description": "DescA", - "token": "S3CR3T1", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - }, - { - "uuid": "uuid0", - "projects": [ - { - "project": "ARGO", - "roles": [ - "consumer", - "publisher" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "Test", - "token": "S3CR3T", - "email": "Test@test.com", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - } - ], - "nextPageToken": "", - "totalSize": 7 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/users", WrapMockAuthConfig(ProjectListUsers, cfgKafka, &brk, str, &mgr, nil, "service_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestProjectUserListUnprivARGO() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/members", nil) - if err != nil { - 
log.Fatal(err) - } - - expResp := `{ - "users": [ - { - "uuid": "same_uuid", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "UserSame2", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - }, - { - "uuid": "same_uuid", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "UserSame1", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - }, - { - "uuid": "uuid4", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [ - "topic2" - ], - "subscriptions": [ - "sub3", - "sub4" - ] - } - ], - "name": "UserZ", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - }, - { - "uuid": "uuid3", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [ - "topic3" - ], - "subscriptions": [ - "sub2" - ] - } - ], - "name": "UserX", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - }, - { - "uuid": "uuid2", - "projects": [ - { - "project": "ARGO", - "roles": [ - "consumer", - "publisher" - ], - "topics": [ - "topic1", - "topic2" - ], - "subscriptions": [ - "sub1", - "sub3", - "sub4" - ] - } - ], - "name": "UserB", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - }, - { - "uuid": "uuid1", - "projects": [ - { - "project": "ARGO", - "roles": [ - "consumer", - "publisher" - ], - "topics": [ - "topic1", - "topic2" - ], - "subscriptions": [ - "sub1", - "sub2", - "sub3" - ] - } - ], - "name": "UserA", - "first_name": "FirstA", - 
"last_name": "LastA", - "organization": "OrgA", - "description": "DescA", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - }, - { - "uuid": "uuid0", - "projects": [ - { - "project": "ARGO", - "roles": [ - "consumer", - "publisher" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "Test", - "email": "Test@test.com", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - } - ], - "nextPageToken": "", - "totalSize": 7 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/members", WrapMockAuthConfig(ProjectListUsers, cfgKafka, &brk, str, &mgr, nil, "project_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestRegisterUser() { - - type td struct { - postBody string - expectedResponse string - expectedStatusCode int - msg string - } - - testData := []td{ - { - postBody: `{ - "name": "new-register-user", - "first_name": "first-name", - "last_name": "last-name", - "email": "test@example.com", - "organization": "org1", - "description": "desc1" - }`, - expectedResponse: `{ - "uuid": "{{UUID}}", - "name": "new-register-user", - "first_name": "first-name", - "last_name": "last-name", - "organization": "org1", - "description": "desc1", - "email": "test@example.com", - "status": "pending", - "activation_token": "{{ATKN}}", - "registered_at": "{{REAT}}" -}`, - expectedStatusCode: 200, - msg: "User registration successful", - }, - { - postBody: `{ - "name": "UserA", - "first_name": "new-name", - "last_name": "last-name", - "email": "test@example.com", - "organization": "org1", - "description": 
"desc1" - }`, - expectedResponse: `{ - "error": { - "code": 409, - "message": "User already exists", - "status": "ALREADY_EXISTS" - } -}`, - expectedStatusCode: 409, - msg: "user already exists", - }, - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - cfgKafka.ResAuth = false - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - - for _, t := range testData { - - w := httptest.NewRecorder() - req, err := http.NewRequest("POST", "http://localhost:8080/v1/registrations", strings.NewReader(t.postBody)) - if err != nil { - log.Fatal(err) - } - router.HandleFunc("/v1/registrations", WrapMockAuthConfig(RegisterUser, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - if t.expectedStatusCode == 200 { - t.expectedResponse = strings.Replace(t.expectedResponse, "{{UUID}}", str.UserRegistrations[1].UUID, 1) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{REAT}}", str.UserRegistrations[1].RegisteredAt, 1) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{ATKN}}", str.UserRegistrations[1].ActivationToken, 1) - } - suite.Equal(t.expectedStatusCode, w.Code, t.msg) - suite.Equal(t.expectedResponse, w.Body.String(), t.msg) - } - -} - -func (suite *HandlerTestSuite) TestAcceptRegisterUser() { - - type td struct { - ruuid string - uname string - expectedResponse string - expectedStatusCode int - msg string - } - - testData := []td{{ - ruuid: "ur-uuid1", - uname: "urname", - expectedResponse: `{ - "uuid": "{{UUID}}", - "projects": [], - "name": "urname", - "first_name": "urfname", - "last_name": "urlname", - "organization": "urorg", - "description": "urdesc", - "token": "{{TOKEN}}", - "email": "uremail", - "service_roles": [], - "created_on": "{{CON}}", - "modified_on": "{{MON}}", - "created_by": "UserA" -}`, - 
expectedStatusCode: 200, - msg: "Successfully accepted a user's registration", - }, - { - ruuid: "ur-uuid1", - uname: "urname", - expectedResponse: `{ - "error": { - "code": 404, - "message": "User registration doesn't exist", - "status": "NOT_FOUND" - } -}`, - expectedStatusCode: 404, - msg: "User registration doesn't exist", - }} - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - cfgKafka.ResAuth = false - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - - for _, t := range testData { - - w := httptest.NewRecorder() - url := fmt.Sprintf("http://localhost:8080/v1/registrations/%v:accept", t.ruuid) - req, err := http.NewRequest("POST", url, nil) - if err != nil { - log.Fatal(err) - } - router.HandleFunc("/v1/registrations/{uuid}:accept", WrapMockAuthConfig(AcceptRegisterUser, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - if t.expectedStatusCode == 200 { - u, _ := auth.FindUsers("", "", t.uname, true, str) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{UUID}}", u.List[0].UUID, 1) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{TOKEN}}", u.List[0].Token, 1) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{CON}}", u.List[0].CreatedOn, 1) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{MON}}", u.List[0].ModifiedOn, 1) - } - suite.Equal(t.expectedStatusCode, w.Code, t.msg) - suite.Equal(t.expectedResponse, w.Body.String(), t.msg) - } - -} - -func (suite *HandlerTestSuite) TestDeclineRegisterUser() { - - type td struct { - regUUID string - expectedResponse string - expectedStatusCode int - msg string - } - - testData := []td{{ - regUUID: "ur-uuid1", - expectedResponse: `{}`, - expectedStatusCode: 200, - msg: "Successfully declined a user's registration", - }, - { - regUUID: 
"unknown", - expectedResponse: `{ - "error": { - "code": 404, - "message": "User registration doesn't exist", - "status": "NOT_FOUND" - } -}`, - expectedStatusCode: 404, - msg: "User registration doesn't exist", - }} - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - cfgKafka.ResAuth = false - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - - for _, t := range testData { - - w := httptest.NewRecorder() - url := fmt.Sprintf("http://localhost:8080/v1/registrations/%v:decline", t.regUUID) - req, err := http.NewRequest("POST", url, nil) - if err != nil { - log.Fatal(err) - } - router.HandleFunc("/v1/registrations/{uuid}:decline", WrapMockAuthConfig(DeclineRegisterUser, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - if t.expectedStatusCode == 200 { - suite.Equal(auth.DeclinedRegistrationStatus, str.UserRegistrations[0].Status) - } - suite.Equal(t.expectedStatusCode, w.Code, t.msg) - suite.Equal(t.expectedResponse, w.Body.String(), t.msg) - } - -} - -func (suite *HandlerTestSuite) TestListOneRegistration() { - - type td struct { - regUUID string - expectedResponse string - expectedStatusCode int - msg string - } - - testData := []td{ - { - regUUID: "ur-uuid1", - expectedResponse: `{ - "uuid": "ur-uuid1", - "name": "urname", - "first_name": "urfname", - "last_name": "urlname", - "organization": "urorg", - "description": "urdesc", - "email": "uremail", - "status": "pending", - "activation_token": "uratkn-1", - "registered_at": "2019-05-12T22:26:58Z", - "modified_by": "UserA", - "modified_at": "2020-05-15T22:26:58Z" -}`, - expectedStatusCode: 200, - msg: "User registration retrieved successfully", - }, - { - regUUID: "unknown", - expectedResponse: `{ - "error": { - "code": 404, - "message": "User registration doesn't exist", - 
"status": "NOT_FOUND" - } -}`, - expectedStatusCode: 404, - msg: "User registration doesn't exist", - }, - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - cfgKafka.ResAuth = false - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - - for _, t := range testData { - - w := httptest.NewRecorder() - url := fmt.Sprintf("http://localhost:8080/v1/registrations/%v", t.regUUID) - req, err := http.NewRequest("GET", url, nil) - if err != nil { - log.Fatal(err) - } - router.HandleFunc("/v1/registrations/{uuid}", WrapMockAuthConfig(ListOneRegistration, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(t.expectedStatusCode, w.Code, t.msg) - suite.Equal(t.expectedResponse, w.Body.String(), t.msg) - } - -} - -func (suite *HandlerTestSuite) TestListManyregistrations() { - - type td struct { - urlPath string - expectedResponse string - expectedStatusCode int - msg string - } - - testData := []td{ - { - urlPath: "registrations", - expectedResponse: `{ - "user_registrations": [ - { - "uuid": "ur-uuid1", - "name": "urname", - "first_name": "urfname", - "last_name": "urlname", - "organization": "urorg", - "description": "urdesc", - "email": "uremail", - "status": "pending", - "activation_token": "uratkn-1", - "registered_at": "2019-05-12T22:26:58Z", - "modified_by": "UserA", - "modified_at": "2020-05-15T22:26:58Z" - } - ] -}`, - expectedStatusCode: 200, - msg: "Retrieve all available user registrations without any filters", - }, - { - urlPath: "registrations?status=pending&name=urname&activation_token=uratkn-1&email=uremail&organization=urorg", - expectedResponse: `{ - "user_registrations": [ - { - "uuid": "ur-uuid1", - "name": "urname", - "first_name": "urfname", - "last_name": "urlname", - "organization": "urorg", - "description": 
"urdesc", - "email": "uremail", - "status": "pending", - "activation_token": "uratkn-1", - "registered_at": "2019-05-12T22:26:58Z", - "modified_by": "UserA", - "modified_at": "2020-05-15T22:26:58Z" - } - ] -}`, - expectedStatusCode: 200, - msg: "Retrieve all available user registrations with filters", - }, - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - cfgKafka.ResAuth = false - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - - for _, t := range testData { - - w := httptest.NewRecorder() - url := fmt.Sprintf("http://localhost:8080/v1/%v", t.urlPath) - req, err := http.NewRequest("GET", url, nil) - if err != nil { - log.Fatal(err) - } - router.HandleFunc("/v1/registrations", WrapMockAuthConfig(ListAllRegistrations, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(t.expectedStatusCode, w.Code, t.msg) - suite.Equal(t.expectedResponse, w.Body.String(), t.msg) - } - -} - -func (suite *HandlerTestSuite) TestProjectUserCreate() { - - type td struct { - user string - postBody string - expectedResponse string - expectedStatusCode int - msg string - } - - testData := []td{ - { - user: "member-user", - postBody: `{ - "email": "test@example.com", - "service_roles": ["service_admin"], - "projects": [ - { - "project": "ARGO", - "roles": ["project_admin", "publisher", "consumer"] - }, - { - "project": "unknown" - } - ] - }`, - expectedResponse: `{ - "uuid": "{{UUID}}", - "projects": [ - { - "project": "ARGO", - "roles": [ - "project_admin", - "publisher", - "consumer" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "member-user", - "token": "{{TOKEN}}", - "email": "test@example.com", - "service_roles": [], - "created_on": "{{CON}}", - "modified_on": "{{MON}}", - "created_by": "UserA" -}`, - 
expectedStatusCode: 200, - msg: "Create a member of a project(ignore other projects & service roles)", - }, - { - user: "member-user-2", - postBody: `{ - "email": "test@example.com", - "service_roles": ["service_admin"], - "projects": [] - }`, - expectedResponse: `{ - "uuid": "{{UUID}}", - "projects": [ - { - "project": "ARGO", - "roles": [], - "topics": [], - "subscriptions": [] - } - ], - "name": "member-user-2", - "token": "{{TOKEN}}", - "email": "test@example.com", - "service_roles": [], - "created_on": "{{CON}}", - "modified_on": "{{MON}}", - "created_by": "UserA" -}`, - expectedStatusCode: 200, - msg: "Create a member/user that automatically gets assigned to the respective project", - }, - { - user: "member-user-unknown", - postBody: `{ - "email": "test@example.com", - "service_roles": ["service_admin"], - "projects": [ - { - "project": "ARGO", - "roles": ["unknown"] - }, - { - "project": "unknown" - } - ] - }`, - expectedResponse: `{ - "error": { - "code": 400, - "message": "invalid role: unknown", - "status": "INVALID_ARGUMENT" - } -}`, - expectedStatusCode: 400, - msg: "Invalid user role", - }, - { - user: "member-user", - postBody: `{ - "email": "test@example.com", - "service_roles": ["service_admin"], - "projects": [ - { - "project": "ARGO", - "roles": ["unknown"] - }, - { - "project": "unknown" - } - ] - }`, - expectedResponse: `{ - "error": { - "code": 409, - "message": "User already exists", - "status": "ALREADY_EXISTS" - } -}`, - expectedStatusCode: 409, - msg: "user already exists", - }, - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - cfgKafka.ResAuth = false - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - - for _, t := range testData { - - w := httptest.NewRecorder() - url := 
fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/members/%v", t.user) - req, err := http.NewRequest("POST", url, strings.NewReader(t.postBody)) - if err != nil { - log.Fatal(err) - } - router.HandleFunc("/v1/projects/{project}/members/{user}", WrapMockAuthConfig(ProjectUserCreate, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - if t.expectedStatusCode == 200 { - u, _ := auth.FindUsers("argo_uuid", "", t.user, true, str) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{UUID}}", u.List[0].UUID, 1) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{TOKEN}}", u.List[0].Token, 1) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{CON}}", u.List[0].CreatedOn, 1) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{MON}}", u.List[0].ModifiedOn, 1) - } - suite.Equal(t.expectedStatusCode, w.Code, t.msg) - suite.Equal(t.expectedResponse, w.Body.String(), t.msg) - } - -} - -func (suite *HandlerTestSuite) TestProjectUserUpdate() { - - type td struct { - user string - postBody string - authRole string - expectedResponse string - expectedStatusCode int - msg string - } - - testData := []td{ - { - user: "UserA", - postBody: `{ - "email": "test@example.com", - "name": "new-name", - "service_roles": ["service_admin"], - "projects": [ - { - "project": "ARGO", - "roles": ["project_admin", "publisher"] - }, - { - "project": "unknown" - } - ] - }`, - authRole: "project_admin", - expectedResponse: `{ - "uuid": "{{UUID}}", - "projects": [ - { - "project": "ARGO", - "roles": [ - "project_admin", - "publisher" - ], - "topics": [ - "topic1", - "topic2" - ], - "subscriptions": [ - "sub1", - "sub2", - "sub3" - ] - } - ], - "name": "UserA", - "first_name": "FirstA", - "last_name": "LastA", - "organization": "OrgA", - "description": "DescA", - "email": "foo-email", - "service_roles": [], - "created_on": "{{CON}}", - "modified_on": "{{MON}}" -}`, - expectedStatusCode: 200, - msg: "Update a member of a project(ignore other projects & 
service roles & email & name)(project_admin)", - }, - { - user: "UserA", - postBody: `{ - "email": "test@example.com", - "name": "new-name", - "service_roles": ["service_admin"], - "projects": [ - { - "project": "ARGO", - "roles": ["project_admin", "publisher"] - }, - { - "project": "unknown" - } - ] - }`, - authRole: "service_admin", - expectedResponse: `{ - "uuid": "{{UUID}}", - "projects": [ - { - "project": "ARGO", - "roles": [ - "project_admin", - "publisher" - ], - "topics": [ - "topic1", - "topic2" - ], - "subscriptions": [ - "sub1", - "sub2", - "sub3" - ] - } - ], - "name": "UserA", - "first_name": "FirstA", - "last_name": "LastA", - "organization": "OrgA", - "description": "DescA", - "token": "{{TOKEN}}", - "email": "foo-email", - "service_roles": [], - "created_on": "{{CON}}", - "modified_on": "{{MON}}" -}`, - expectedStatusCode: 200, - msg: "Update a member of a project(ignore other projects & service roles & email & name)(service_admin)", - }, - { - user: "UserA", - postBody: `{ - "email": "test@example.com", - "service_roles": ["service_admin"], - "projects": [ - { - "project": "ARGO", - "roles": ["unknown"] - } - ] - }`, - authRole: "project_admin", - expectedResponse: `{ - "error": { - "code": 400, - "message": "invalid role: unknown", - "status": "INVALID_ARGUMENT" - } -}`, - expectedStatusCode: 400, - msg: "Invalid user role", - }, - { - user: "UserA", - postBody: `{ - "email": "test@example.com", - "service_roles": ["service_admin"], - "projects": [ - { - "project": "ARGO2", - "roles": ["publisher"] - } - ] - }`, - authRole: "project_admin", - expectedResponse: `{ - "error": { - "code": 403, - "message": "Access to this resource is forbidden. 
User is not a member of the project", - "status": "FORBIDDEN" - } -}`, - expectedStatusCode: 403, - msg: "user is not a member of the project", - }, - { - user: "unknown", - postBody: `{ - "email": "test@example.com", - "service_roles": ["service_admin"], - "projects": [ - { - "project": "ARGO", - "roles": ["publisher"] - } - ] - }`, - authRole: "project_admin", - expectedResponse: `{ - "error": { - "code": 404, - "message": "User doesn't exist", - "status": "NOT_FOUND" - } -}`, - expectedStatusCode: 404, - msg: "user doesn't exist" + - "", - }, - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - cfgKafka.ResAuth = true - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - mgr := oldPush.Manager{} - pc := new(push.MockClient) - - for _, t := range testData { - - w := httptest.NewRecorder() - url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/members/%v", t.user) - req, err := http.NewRequest("PUT", url, strings.NewReader(t.postBody)) - if err != nil { - log.Fatal(err) - } - router := mux.NewRouter().StrictSlash(true) - router.HandleFunc("/v1/projects/{project}/members/{user}", WrapMockAuthConfig(ProjectUserUpdate, cfgKafka, &brk, str, &mgr, pc, t.authRole)) - router.ServeHTTP(w, req) - if t.expectedStatusCode == 200 { - u, _ := auth.FindUsers("argo_uuid", "", t.user, true, str) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{UUID}}", u.List[0].UUID, 1) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{TOKEN}}", u.List[0].Token, 1) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{CON}}", u.List[0].CreatedOn, 1) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{MON}}", u.List[0].ModifiedOn, 1) - } - suite.Equal(t.expectedStatusCode, w.Code, t.msg) - suite.Equal(t.expectedResponse, w.Body.String(), t.msg) - } - -} - -func (suite *HandlerTestSuite) TestProjectUserRemove() { - - type 
td struct { - user string - expectedResponse string - expectedStatusCode int - msg string - } - - testData := []td{ - { - user: "UserA", - expectedResponse: `{}`, - expectedStatusCode: 200, - msg: "Remove a member from the project", - }, - { - user: "UserA", - expectedResponse: `{ - "error": { - "code": 403, - "message": "Access to this resource is forbidden. User is not a member of the project", - "status": "FORBIDDEN" - } -}`, - expectedStatusCode: 403, - msg: "user is not a member of the project", - }, - { - user: "unknown", - expectedResponse: `{ - "error": { - "code": 404, - "message": "User doesn't exist", - "status": "NOT_FOUND" - } -}`, - expectedStatusCode: 404, - msg: "user doesn't exist" + - "", - }, - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - cfgKafka.ResAuth = true - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - - for _, t := range testData { - - w := httptest.NewRecorder() - url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/members/%v:remove", t.user) - req, err := http.NewRequest("POST", url, nil) - if err != nil { - log.Fatal(err) - } - router.HandleFunc("/v1/projects/{project}/members/{user}:remove", WrapMockAuthConfig(ProjectUserRemove, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(t.expectedStatusCode, w.Code, t.msg) - suite.Equal(t.expectedResponse, w.Body.String(), t.msg) - } -} - -func (suite *HandlerTestSuite) TestProjectUserAdd() { - - type td struct { - user string - project string - authRole string - postBody string - expectedResponse string - expectedStatusCode int - msg string - } - - testData := []td{ - { - user: "UserA", - project: "ARGO2", - postBody: `{ - "roles": ["unknown"] - }`, - expectedResponse: `{ - "error": { - "code": 400, - "message": "invalid 
role: unknown", - "status": "INVALID_ARGUMENT" - } -}`, - expectedStatusCode: 400, - msg: "Invalid user role", - }, - { - user: "UserA", - project: "ARGO2", - postBody: `{ - "roles": ["project_admin", "publisher", "consumer"] - }`, - authRole: "project_admin", - expectedResponse: `{ - "uuid": "{{UUID}}", - "projects": [ - { - "project": "ARGO2", - "roles": [ - "project_admin", - "publisher", - "consumer" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "UserA", - "first_name": "FirstA", - "last_name": "LastA", - "organization": "OrgA", - "description": "DescA", - "email": "foo-email", - "service_roles": [], - "created_on": "{{CON}}", - "modified_on": "{{MON}}" -}`, - expectedStatusCode: 200, - msg: "Add user to project(project_admin)", - }, - { - user: "UserA", - project: "ARGO2", - postBody: `{ - "roles": ["project_admin", "consumer", "publisher"] - }`, - authRole: "service_admin", - expectedResponse: `{ - "uuid": "{{UUID}}", - "projects": [ - { - "project": "ARGO", - "roles": [ - "consumer", - "publisher" - ], - "topics": [ - "topic1", - "topic2" - ], - "subscriptions": [ - "sub1", - "sub2", - "sub3" - ] - }, - { - "project": "ARGO2", - "roles": [ - "project_admin", - "consumer", - "publisher" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "UserA", - "first_name": "FirstA", - "last_name": "LastA", - "organization": "OrgA", - "description": "DescA", - "token": "{{TOKEN}}", - "email": "foo-email", - "service_roles": [], - "created_on": "{{CON}}", - "modified_on": "{{MON}}" -}`, - expectedStatusCode: 200, - msg: "Add user to project(service_admin)", - }, - { - user: "UserA", - project: "ARGO", - postBody: `{ - "roles": ["project_admin"] - }`, - expectedResponse: `{ - "error": { - "code": 409, - "message": "User is already a member of the project", - "status": "CONFLICT" - } -}`, - expectedStatusCode: 409, - msg: "user already member of the project", - }, - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - 
cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - cfgKafka.ResAuth = false - brk := brokers.MockBroker{} - mgr := oldPush.Manager{} - pc := new(push.MockClient) - - for _, t := range testData { - str := stores.NewMockStore("whatever", "argo_mgs") - w := httptest.NewRecorder() - url := fmt.Sprintf("http://localhost:8080/v1/projects/%v/members/%v:add", t.project, t.user) - req, err := http.NewRequest("POST", url, strings.NewReader(t.postBody)) - if err != nil { - log.Fatal(err) - } - router := mux.NewRouter().StrictSlash(true) - router.HandleFunc("/v1/projects/{project}/members/{user}:add", WrapMockAuthConfig(ProjectUserAdd, cfgKafka, &brk, str, &mgr, pc, t.authRole)) - router.ServeHTTP(w, req) - if t.expectedStatusCode == 200 { - u, _ := auth.FindUsers("argo_uuid", "", t.user, true, str) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{UUID}}", u.List[0].UUID, 1) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{TOKEN}}", u.List[0].Token, 1) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{CON}}", u.List[0].CreatedOn, 1) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{MON}}", u.List[0].ModifiedOn, 1) - } - suite.Equal(t.expectedStatusCode, w.Code, t.msg) - suite.Equal(t.expectedResponse, w.Body.String(), t.msg) - } - -} - -func (suite *HandlerTestSuite) TestUserListAllProjectARGO2() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?project=ARGO2", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "users": [ - { - "uuid": "uuid8", - "projects": [ - { - "project": "ARGO2", - "roles": [ - "consumer", - "publisher" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "UserZ", - "token": "S3CR3T1", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - } - ], - "nextPageToken": "", - "totalSize": 1 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk 
:= brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil, "service_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestUserListAllProjectUNKNOWN() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?project=UNKNOWN", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 404, - "message": "ProjectUUID doesn't exist", - "status": "NOT_FOUND" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil, "service_admin")) - router.ServeHTTP(w, req) - suite.Equal(404, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestUserListAllStartingAtSecond() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?pageSize=2&pageToken=Nw==", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "users": [ - { - "uuid": "uuid7", - "projects": [], - "name": "push_worker_0", - "token": "push_token", - "email": "foo-email", - "service_roles": [ - "push_worker" - ], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z" - }, - { - "uuid": "same_uuid", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [], - "subscriptions": [] - } - ], - "name": "UserSame2", - "token": "S3CR3T42", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": 
"2009-11-10T23:00:00Z", - "created_by": "UserA" - } - ], - "nextPageToken": "NQ==", - "totalSize": 9 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil, "service_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestUserListAllEmptyCollection() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "users": [], - "nextPageToken": "", - "totalSize": 0 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - // empty the store - str.UserList = []stores.QUser{} - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestUserListAllIntermediatePage() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?pageToken=NA==&pageSize=2", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "users": [ - { - "uuid": "uuid4", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [ - "topic2" - ], - "subscriptions": [ - "sub3", - "sub4" - ] - } - ], - "name": "UserZ", - "token": "S3CR3T4", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" - }, - { - 
"uuid": "uuid3", - "projects": [ - { - "project": "ARGO", - "roles": [ - "publisher", - "consumer" - ], - "topics": [ - "topic3" - ], - "subscriptions": [ - "sub2" - ] - } - ], - "name": "UserX", - "token": "S3CR3T3", - "email": "foo-email", - "service_roles": [], - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA" - } - ], - "nextPageToken": "Mg==", - "totalSize": 9 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil, "service_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestUserListAllInvalidPageSize() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?pageSize=invalid", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 400, - "message": "Invalid page size", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestUserListAllInvalidPageToken() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/users?pageToken=invalid", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 400, - "message": "Invalid page token", - "status": "INVALID_ARGUMENT" - 
} -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users", WrapMockAuthConfig(UserListAll, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestUserDelete() { - - req, err := http.NewRequest("DELETE", "http://localhost:8080/v1/users/UserA", nil) - - if err != nil { - log.Fatal(err) - } - - expResp := "" - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserDelete, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - - // Search the deleted user - - req, err = http.NewRequest("GET", "http://localhost:8080/v1/users/UserA", nil) - if err != nil { - log.Fatal(err) - } - - expResp2 := `{ - "error": { - "code": 404, - "message": "User doesn't exist", - "status": "NOT_FOUND" - } -}` - - router = mux.NewRouter().StrictSlash(true) - w = httptest.NewRecorder() - router.HandleFunc("/v1/users/{user}", WrapMockAuthConfig(UserListOne, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(404, w.Code) - suite.Equal(expResp2, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestProjectDelete() { - - req, err := http.NewRequest("DELETE", "http://localhost:8080/v1/projects/ARGO", nil) - - if err != nil { - log.Fatal(err) - } - - expResp := "" - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := 
stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}", WrapMockAuthConfig(ProjectDelete, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestProjectUpdate() { - - postJSON := `{ - "name":"NEWARGO", - "description":"time to change the description mates and the name" -}` - - req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}", WrapMockAuthConfig(ProjectUpdate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - projOut, _ := projects.GetFromJSON([]byte(w.Body.String())) - suite.Equal("NEWARGO", projOut.Name) - // Check if the mock authenticated userA has been marked as the creator - suite.Equal("UserA", projOut.CreatedBy) - suite.Equal("time to change the description mates and the name", projOut.Description) -} - -func (suite *HandlerTestSuite) TestProjectCreate() { - - postJSON := `{ - "description":"This is a newly created project" -}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGONEW", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - 
router.HandleFunc("/v1/projects/{project}", WrapMockAuthConfig(ProjectCreate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - projOut, _ := projects.GetFromJSON([]byte(w.Body.String())) - suite.Equal("ARGONEW", projOut.Name) - // Check if the mock authenticated userA has been marked as the creator - suite.Equal("UserA", projOut.CreatedBy) - suite.Equal("This is a newly created project", projOut.Description) -} - -func (suite *HandlerTestSuite) TestProjectListAll() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "projects": [ - { - "name": "ARGO", - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA", - "description": "simple project" - }, - { - "name": "ARGO2", - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA", - "description": "simple project" - } - ] -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - - router.HandleFunc("/v1/projects", WrapMockAuthConfig(ProjectListAll, cfgKafka, &brk, str, &mgr, nil)) - - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestProjectListOneNotFound() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGONAUFTS", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 404, - "message": "ProjectUUID doesn't exist", - "status": "NOT_FOUND" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w 
:= httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}", WrapMockAuthConfig(ProjectListOne, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(404, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestProjectListOne() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "name": "ARGO", - "created_on": "2009-11-10T23:00:00Z", - "modified_on": "2009-11-10T23:00:00Z", - "created_by": "UserA", - "description": "simple project" -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}", WrapMockAuthConfig(ProjectListOne, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubModPushConfigError() { - - postJSON := `{ - "topic":"projects/ARGO/topics/topic1", - "pushConfig": { - "pushEndpoint": "http://www.example.com", - "retryPolicy": {} - } -}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1:modifyPushConfig", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 400, - "message": "Push endpoint should be addressed by a valid https url", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - 
router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubModPushInvalidRetPol() { - - postJSON := `{ - "topic":"projects/ARGO/topics/topic1", - "pushConfig": { - "pushEndpoint": "https://www.example.com", - "retryPolicy": { - "type": "unknown" - } - } -}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1:modifyPushConfig", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 400, - "message": "Retry policy can only be of 'linear' or 'slowstart' type", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -// TestSubModPushConfigToActive tests the case where the user modifies the push configuration, -// in order to activate the subscription on the push server -// the push configuration was empty before the api call -func (suite *HandlerTestSuite) TestSubModPushConfigToActive() { - - postJSON := `{ - "pushConfig": { - "pushEndpoint": "https://www.example.com", - "retryPolicy": {} - } -}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1:modifyPushConfig", strings.NewReader(postJSON)) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() - 
cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - sub, _ := str.QueryOneSub("argo_uuid", "sub1") - suite.Equal(200, w.Code) - suite.Equal("", w.Body.String()) - suite.Equal("https://www.example.com", sub.PushEndpoint) - suite.Equal(int64(1), sub.MaxMessages) - suite.Equal(3000, sub.RetPeriod) - suite.Equal("linear", sub.RetPolicy) - suite.False(sub.Verified) - suite.NotEqual("", sub.VerificationHash) -} - -// TestSubModPushConfigToInactive tests the use case where the user modifies the push configuration -// in order to deactivate the subscription on the push server -// the push configuration has values before the call and turns into an empty one by the end of the call -func (suite *HandlerTestSuite) TestSubModPushConfigToInactive() { - - postJSON := `{ - "pushConfig": {} -}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:modifyPushConfig", strings.NewReader(postJSON)) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - sub, _ := str.QueryOneSub("argo_uuid", "sub4") - suite.Equal(200, w.Code) - suite.Equal("", w.Body.String()) - suite.Equal("", sub.PushEndpoint) - suite.Equal(0, 
sub.RetPeriod) - suite.Equal("", sub.RetPolicy) - suite.Equal("", sub.VerificationHash) - suite.False(sub.Verified) - // check to see that the push worker user has been removed from the subscription's acl - a1, _ := str.QueryACL("argo_uuid", "subscriptions", "sub4") - suite.Equal([]string{"uuid2", "uuid4"}, a1.ACL) -} - -// TestSubModPushConfigToInactivePushDisabled tests the use case where the user modifies the push configuration -// in order to deactivate the subscription on the push server -// the push configuration has values before the call and turns into an empty one by the end of the call -// push enabled is false, but turning a subscription from push to pull should always be available as an api action -func (suite *HandlerTestSuite) TestSubModPushConfigToInactivePushDisabled() { - - postJSON := `{ - "pushConfig": {} -}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:modifyPushConfig", strings.NewReader(postJSON)) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = false - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - sub, _ := str.QueryOneSub("argo_uuid", "sub4") - suite.Equal(200, w.Code) - suite.Equal("", w.Body.String()) - suite.Equal("", sub.PushEndpoint) - suite.Equal(0, sub.RetPeriod) - suite.Equal("", sub.RetPolicy) -} - -// TestSubModPushConfigToInactiveMissingPushWorker tests the use case where the user modifies the push configuration -// in order to deactivate the subscription on the push server -// the push configuration has values before the call and turns into an 
empty one by the end of the call -// push enabled is true, we cannot retrieve the push worker user in order to remove him from the subscription's acl -// but turning a subscription from push to pull should always be available as an api action -func (suite *HandlerTestSuite) TestSubModPushConfigToInactiveMissingPushWorker() { - - postJSON := `{ - "pushConfig": {} -}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:modifyPushConfig", strings.NewReader(postJSON)) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushWorkerToken = "missing" - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - sub, _ := str.QueryOneSub("argo_uuid", "sub4") - suite.Equal(200, w.Code) - suite.Equal("", w.Body.String()) - suite.Equal("", sub.PushEndpoint) - suite.Equal(0, sub.RetPeriod) - suite.Equal("", sub.RetPolicy) -} - -// TestSubModPushConfigToActive tests the case where the user modifies the push configuration, -// in order to activate the subscription on the push server -// the push configuration was empty before the api call -// since the push endpoint that has been registered is different from the previous verified one -// the sub will be deactivated on the push server and turn into unverified -func (suite *HandlerTestSuite) TestSubModPushConfigUpdate() { - - postJSON := `{ - "pushConfig": { - "pushEndpoint": "https://www.example2.com", - "maxMessages": 5, - "retryPolicy": { - "type":"linear", - "period": 5000 - } - } -}` - - req, err := http.NewRequest("POST", 
"http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:modifyPushConfig", strings.NewReader(postJSON)) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - sub, _ := str.QueryOneSub("argo_uuid", "sub4") - suite.Equal(200, w.Code) - suite.Equal("", w.Body.String()) - suite.Equal("https://www.example2.com", sub.PushEndpoint) - suite.Equal(int64(5), sub.MaxMessages) - suite.Equal(5000, sub.RetPeriod) - suite.Equal("linear", sub.RetPolicy) - suite.False(sub.Verified) - suite.NotEqual("", sub.VerificationHash) - suite.NotEqual("push-id-1", sub.VerificationHash) -} - -// TestSubModPushConfigToActiveORUpdatePushDisabled tests the case where the user modifies the push configuration, -// in order to activate the subscription on the push server -// the push enabled config option is set to false -func (suite *HandlerTestSuite) TestSubModPushConfigToActiveORUpdatePushDisabled() { - - postJSON := `{ - "pushConfig": { - "pushEndpoint": "https://www.example2.com", - "retryPolicy": { - "type":"linear", - "period": 5000 - } - } -}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:modifyPushConfig", strings.NewReader(postJSON)) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 409, - "message": "Push functionality is currently disabled", - "status": "CONFLICT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = false - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - 
router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(409, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -// TestSubModPushConfigToActiveORUpdateMissingWorker tests the case where the user modifies the push configuration, -// in order to activate the subscription on the push server -// push enabled is true, but ams can't retrieve the push worker user -func (suite *HandlerTestSuite) TestSubModPushConfigToActiveORUpdateMissingWorker() { - - postJSON := `{ - "pushConfig": { - "pushEndpoint": "https://www.example2.com", - "retryPolicy": { - "type":"linear", - "period": 5000 - } - } -}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:modifyPushConfig", strings.NewReader(postJSON)) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 500, - "message": "Push functionality is currently unavailable", - "status": "INTERNAL_SERVER_ERROR" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushWorkerToken = "missing" - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyPushConfig", WrapMockAuthConfig(SubModPush, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(500, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestVerifyPushEndpoint() { - - ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusOK) - w.Write([]byte("vhash-1")) - 
})) - - defer ts.Close() - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/push-sub-v1:verifyPushEndpoint", nil) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - - // add a temporary subscription - q1 := stores.QSub{ - Name: "push-sub-v1", - ProjectUUID: "argo_uuid", - PushEndpoint: ts.URL, - VerificationHash: "vhash-1", - Verified: false, - } - - str.SubList = append(str.SubList, q1) - str.SubsACL["push-sub-v1"] = stores.QAcl{} - - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:verifyPushEndpoint", WrapMockAuthConfig(SubVerifyPushEndpoint, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal("", w.Body.String()) - // check to see that the push worker user has been added to the subscription's acl - a1, _ := str.QueryACL("argo_uuid", "subscriptions", "push-sub-v1") - suite.Equal([]string{"uuid7"}, a1.ACL) -} - -func (suite *HandlerTestSuite) TestVerifyPushEndpointHashMisMatch() { - - ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusOK) - w.Write([]byte("unknown_hash")) - })) - - defer ts.Close() - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/push-sub-v1:verifyPushEndpoint", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 401, - "message": "Endpoint verification failed.Wrong verification hash", - "status": "UNAUTHORIZED" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - - // add a temporary subscription - q1 := 
stores.QSub{ - Name: "push-sub-v1", - ProjectUUID: "argo_uuid", - PushEndpoint: ts.URL, - VerificationHash: "vhash-1", - Verified: false, - } - - str.SubList = append(str.SubList, q1) - str.SubsACL["push-sub-v1"] = stores.QAcl{} - - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:verifyPushEndpoint", WrapMockAuthConfig(SubVerifyPushEndpoint, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(401, w.Code) - suite.Equal(expResp, w.Body.String()) - // check to see that the push worker user has NOT been added to the subscription's acl - a1, _ := str.QueryACL("argo_uuid", "subscriptions", "push-sub-v1") - suite.Equal(0, len(a1.ACL)) -} - -func (suite *HandlerTestSuite) TestVerifyPushEndpointUnknownResponse() { - - ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusInternalServerError) - w.Write([]byte("unknown_hash")) - })) - - defer ts.Close() - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/push-sub-v1:verifyPushEndpoint", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 401, - "message": "Endpoint verification failed.Wrong response status code", - "status": "UNAUTHORIZED" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - - // add a temporary subscription - q1 := stores.QSub{ - Name: "push-sub-v1", - ProjectUUID: "argo_uuid", - PushEndpoint: ts.URL, - VerificationHash: "vhash-1", - Verified: false, - } - - str.SubList = append(str.SubList, q1) - str.SubsACL["push-sub-v1"] = stores.QAcl{} - - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - 
router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:verifyPushEndpoint", WrapMockAuthConfig(SubVerifyPushEndpoint, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(401, w.Code) - suite.Equal(expResp, w.Body.String()) - // check to see that the push worker user has NOT been added to the subscription's acl - a1, _ := str.QueryACL("argo_uuid", "subscriptions", "push-sub-v1") - suite.Equal(0, len(a1.ACL)) -} - -// TestVerifyPushEndpointPushServerError tests the case where the endpoint is verified, the push worker is moved to -// the sub's acl despite the push server being unavailable for now -func (suite *HandlerTestSuite) TestVerifyPushEndpointPushServerError() { - - ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusOK) - w.Write([]byte("vhash-1")) - })) - - defer ts.Close() - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/errorSub:verifyPushEndpoint", nil) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - - // add a temporary subscription - q1 := stores.QSub{ - Name: "errorSub", - ProjectUUID: "argo_uuid", - PushEndpoint: ts.URL, - VerificationHash: "vhash-1", - Verified: false, - } - - str.SubList = append(str.SubList, q1) - str.SubsACL["errorSub"] = stores.QAcl{} - - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:verifyPushEndpoint", WrapMockAuthConfig(SubVerifyPushEndpoint, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal("", w.Body.String()) - // check to see that the push worker user has been added to the subscription's acl - a1, _ := str.QueryACL("argo_uuid", 
"subscriptions", "errorSub") - suite.Equal([]string{"uuid7"}, a1.ACL) -} - -func (suite *HandlerTestSuite) TestVerifyPushEndpointAlreadyVerified() { - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/push-sub-v1:verifyPushEndpoint", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 409, - "message": "Push endpoint is already verified", - "status": "CONFLICT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - - // add a temporary subscription - q1 := stores.QSub{ - Name: "push-sub-v1", - ProjectUUID: "argo_uuid", - PushEndpoint: "https://example.com/receive_here", - VerificationHash: "vhash-1", - Verified: true, - } - - str.SubList = append(str.SubList, q1) - - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:verifyPushEndpoint", WrapMockAuthConfig(SubVerifyPushEndpoint, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(409, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestVerifyPushEndpointNotPushEnabled() { - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/push-sub-v1:verifyPushEndpoint", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 409, - "message": "Subscription is not in push mode", - "status": "CONFLICT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - - // add a temporary subscription - q1 := stores.QSub{ - Name: "push-sub-v1", - ProjectUUID: "argo_uuid", - } - - str.SubList = append(str.SubList, q1) - - router := mux.NewRouter().StrictSlash(true) - mgr := 
oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:verifyPushEndpoint", WrapMockAuthConfig(SubVerifyPushEndpoint, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(409, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSubCreatePushConfig() { - - postJSON := `{ - "topic":"projects/ARGO/topics/topic1", - "pushConfig": { - "pushEndpoint": "https://www.example.com", - "retryPolicy": {} - } -}` - - req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/subNew", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "name": "/projects/ARGO/subscriptions/subNew", - "topic": "/projects/ARGO/topics/topic1", - "pushConfig": { - "pushEndpoint": "https://www.example.com", - "maxMessages": 1, - "retryPolicy": { - "type": "linear", - "period": 3000 - }, - "verification_hash": "{{VHASH}}", - "verified": false - }, - "ackDeadlineSeconds": 10 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - sub, _ := str.QueryOneSub("argo_uuid", "subNew") - expResp = strings.Replace(expResp, "{{VHASH}}", sub.VerificationHash, 1) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSubCreatePushConfigSlowStart() { - - postJSON := `{ - "topic":"projects/ARGO/topics/topic1", - "pushConfig": { - "pushEndpoint": "https://www.example.com", - "retryPolicy": { - "type": "slowstart" - } - } -}` - - req, err := 
http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/subNew", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "name": "/projects/ARGO/subscriptions/subNew", - "topic": "/projects/ARGO/topics/topic1", - "pushConfig": { - "pushEndpoint": "https://www.example.com", - "maxMessages": 1, - "retryPolicy": { - "type": "slowstart" - }, - "verification_hash": "{{VHASH}}", - "verified": false - }, - "ackDeadlineSeconds": 10 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - sub, _ := str.QueryOneSub("argo_uuid", "subNew") - expResp = strings.Replace(expResp, "{{VHASH}}", sub.VerificationHash, 1) - suite.Equal(0, sub.RetPeriod) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSubCreatePushConfigMissingPushWorker() { - - postJSON := `{ - "topic":"projects/ARGO/topics/topic1", - "pushConfig": { - "pushEndpoint": "https://www.example.com", - "retryPolicy": {} - } -}` - - req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/subNew", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 500, - "message": "Push functionality is currently unavailable", - "status": "INTERNAL_SERVER_ERROR" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushWorkerToken = "missing" - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc 
:= new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - // subscription should not have been inserted to the store if it has push configuration - // but we can't retrieve the push worker - _, errSub := str.QueryOneSub("argo_uuid", "subNew") - suite.Equal(500, w.Code) - suite.Equal(expResp, w.Body.String()) - suite.Equal("empty", errSub.Error()) -} - -func (suite *HandlerTestSuite) TestSubCreatePushConfigPushDisabled() { - - postJSON := `{ - "topic":"projects/ARGO/topics/topic1", - "pushConfig": { - "pushEndpoint": "https://www.example.com", - "retryPolicy": {} - } -}` - - req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/subNew", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 409, - "message": "Push functionality is currently disabled", - "status": "CONFLICT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = false - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - // subscription should not have been inserted to the store if it has push configuration - // but push enables is false - _, errSub := str.QueryOneSub("argo_uuid", "subNew") - suite.Equal(409, w.Code) - suite.Equal(expResp, w.Body.String()) - suite.Equal("empty", errSub.Error()) -} - -func (suite *HandlerTestSuite) TestSubCreateInvalidRetPol() { - - postJSON := `{ - "topic":"projects/ARGO/topics/topic1", - "pushConfig": { - "pushEndpoint": 
"https://www.example.com", - "retryPolicy": { - "type": "unknown" - } - } -}` - - req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/subNew", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 400, - "message": "Retry policy can only be of 'linear' or 'slowstart' type", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSubCreatePushConfigError() { - - postJSON := `{ - "topic":"projects/ARGO/topics/topic1", - "pushConfig": { - "pushEndpoint": "http://www.example.com", - "retryPolicy": {} - } -}` - - req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/subNew", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 400, - "message": "Push endpoint should be addressed by a valid https url", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite 
*HandlerTestSuite) TestSubCreate() { - - postJSON := `{ - "topic":"projects/ARGO/topics/topic1" -}` - - req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/subNew", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "name": "/projects/ARGO/subscriptions/subNew", - "topic": "/projects/ARGO/topics/topic1", - "pushConfig": { - "pushEndpoint": "", - "maxMessages": 0, - "retryPolicy": {}, - "verification_hash": "", - "verified": false - }, - "ackDeadlineSeconds": 10 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubCreateExists() { - - postJSON := `{ - "topic":"projects/ARGO/topics/topic1" -}` - - req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 409, - "message": "Subscription already exists", - "status": "ALREADY_EXISTS" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(409, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) 
TestSubCreateErrorTopic() { - - postJSON := `{ - "topic":"projects/ARGO/topics/topicFoo" -}` - - req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1", bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 404, - "message": "Topic doesn't exist", - "status": "NOT_FOUND" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - mgr := oldPush.Manager{} - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubCreate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(404, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSubDelete() { - - req, err := http.NewRequest("DELETE", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1", nil) - if err != nil { - log.Fatal(err) - } - - expResp := "" - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - mgr := oldPush.Manager{} - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubDelete, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSubWithPushConfigDelete() { - - req, err := http.NewRequest("DELETE", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{"message":"Subscription /projects/ARGO/subscriptions/sub4 deactivated"}` - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := 
stores.NewMockStore("whatever", "argo_mgs") - mgr := oldPush.Manager{} - router := mux.NewRouter().StrictSlash(true) - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubDelete, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSubWithPushConfigDeletePushServerError() { - - req, err := http.NewRequest("DELETE", "http://localhost:8080/v1/projects/ARGO/subscriptions/errorSub", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{"message":"Subscription /projects/ARGO/subscriptions/errorSub is not active"}` - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - str.SubList = append(str.SubList, stores.QSub{ - Name: "errorSub", - ProjectUUID: "argo_uuid", - PushEndpoint: "example.com", - // sub needs to be verified in order to perform the call to the push server - Verified: true, - }) - mgr := oldPush.Manager{} - router := mux.NewRouter().StrictSlash(true) - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubDelete, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSubListOne() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "name": "/projects/ARGO/subscriptions/sub1", - "topic": "/projects/ARGO/topics/topic1", - "pushConfig": { - "pushEndpoint": "", - "maxMessages": 0, - "retryPolicy": {}, - "verification_hash": "", - "verified": false - }, - "ackDeadlineSeconds": 10 -}` - - cfgKafka := config.NewAPICfg() - 
cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubListOne, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubListAll() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "subscriptions": [ - { - "name": "/projects/ARGO/subscriptions/sub4", - "topic": "/projects/ARGO/topics/topic4", - "pushConfig": { - "pushEndpoint": "endpoint.foo", - "maxMessages": 1, - "retryPolicy": { - "type": "linear", - "period": 300 - }, - "verification_hash": "push-id-1", - "verified": true - }, - "ackDeadlineSeconds": 10 - }, - { - "name": "/projects/ARGO/subscriptions/sub3", - "topic": "/projects/ARGO/topics/topic3", - "pushConfig": { - "pushEndpoint": "", - "maxMessages": 0, - "retryPolicy": {}, - "verification_hash": "", - "verified": false - }, - "ackDeadlineSeconds": 10 - }, - { - "name": "/projects/ARGO/subscriptions/sub2", - "topic": "/projects/ARGO/topics/topic2", - "pushConfig": { - "pushEndpoint": "", - "maxMessages": 0, - "retryPolicy": {}, - "verification_hash": "", - "verified": false - }, - "ackDeadlineSeconds": 10 - }, - { - "name": "/projects/ARGO/subscriptions/sub1", - "topic": "/projects/ARGO/topics/topic1", - "pushConfig": { - "pushEndpoint": "", - "maxMessages": 0, - "retryPolicy": {}, - "verification_hash": "", - "verified": false - }, - "ackDeadlineSeconds": 10 - } - ], - "nextPageToken": "", - "totalSize": 4 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router 
:= mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions", WrapMockAuthConfig(SubListAll, cfgKafka, &brk, str, &mgr, nil, "project_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubListAllFirstPage() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions?pageSize=2", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "subscriptions": [ - { - "name": "/projects/ARGO/subscriptions/sub4", - "topic": "/projects/ARGO/topics/topic4", - "pushConfig": { - "pushEndpoint": "endpoint.foo", - "maxMessages": 1, - "retryPolicy": { - "type": "linear", - "period": 300 - }, - "verification_hash": "push-id-1", - "verified": true - }, - "ackDeadlineSeconds": 10 - }, - { - "name": "/projects/ARGO/subscriptions/sub3", - "topic": "/projects/ARGO/topics/topic3", - "pushConfig": { - "pushEndpoint": "", - "maxMessages": 0, - "retryPolicy": {}, - "verification_hash": "", - "verified": false - }, - "ackDeadlineSeconds": 10 - } - ], - "nextPageToken": "MQ==", - "totalSize": 4 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions", WrapMockAuthConfig(SubListAll, cfgKafka, &brk, str, &mgr, nil, "project_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubListAllNextPage() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions?pageSize=2&pageToken=MQ==", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "subscriptions": [ - { - "name": 
"/projects/ARGO/subscriptions/sub2", - "topic": "/projects/ARGO/topics/topic2", - "pushConfig": { - "pushEndpoint": "", - "maxMessages": 0, - "retryPolicy": {}, - "verification_hash": "", - "verified": false - }, - "ackDeadlineSeconds": 10 - }, - { - "name": "/projects/ARGO/subscriptions/sub1", - "topic": "/projects/ARGO/topics/topic1", - "pushConfig": { - "pushEndpoint": "", - "maxMessages": 0, - "retryPolicy": {}, - "verification_hash": "", - "verified": false - }, - "ackDeadlineSeconds": 10 - } - ], - "nextPageToken": "", - "totalSize": 4 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions", WrapMockAuthConfig(SubListAll, cfgKafka, &brk, str, &mgr, nil, "project_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubListAllEmpty() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "subscriptions": [], - "nextPageToken": "", - "totalSize": 0 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - // empty the store - str.SubList = []stores.QSub{} - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions", WrapMockAuthConfig(SubListAll, cfgKafka, &brk, str, &mgr, nil, "project_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubListAllConsumer() { - - req, err := http.NewRequest("GET", 
"http://localhost:8080/v1/projects/ARGO/subscriptions", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "subscriptions": [ - { - "name": "/projects/ARGO/subscriptions/sub4", - "topic": "/projects/ARGO/topics/topic4", - "pushConfig": { - "pushEndpoint": "endpoint.foo", - "maxMessages": 1, - "retryPolicy": { - "type": "linear", - "period": 300 - }, - "verification_hash": "push-id-1", - "verified": true - }, - "ackDeadlineSeconds": 10 - }, - { - "name": "/projects/ARGO/subscriptions/sub3", - "topic": "/projects/ARGO/topics/topic3", - "pushConfig": { - "pushEndpoint": "", - "maxMessages": 0, - "retryPolicy": {}, - "verification_hash": "", - "verified": false - }, - "ackDeadlineSeconds": 10 - }, - { - "name": "/projects/ARGO/subscriptions/sub2", - "topic": "/projects/ARGO/topics/topic2", - "pushConfig": { - "pushEndpoint": "", - "maxMessages": 0, - "retryPolicy": {}, - "verification_hash": "", - "verified": false - }, - "ackDeadlineSeconds": 10 - } - ], - "nextPageToken": "", - "totalSize": 3 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions", WrapMockAuthConfig(SubListAll, cfgKafka, &brk, str, &mgr, nil, "consumer")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubListAllConsumerWithPagination() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions?pageSize=2", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "subscriptions": [ - { - "name": "/projects/ARGO/subscriptions/sub4", - "topic": "/projects/ARGO/topics/topic4", - "pushConfig": { - "pushEndpoint": "endpoint.foo", - "maxMessages": 1, - "retryPolicy": { - "type": "linear", - "period": 
300 - }, - "verification_hash": "push-id-1", - "verified": true - }, - "ackDeadlineSeconds": 10 - }, - { - "name": "/projects/ARGO/subscriptions/sub3", - "topic": "/projects/ARGO/topics/topic3", - "pushConfig": { - "pushEndpoint": "", - "maxMessages": 0, - "retryPolicy": {}, - "verification_hash": "", - "verified": false - }, - "ackDeadlineSeconds": 10 - } - ], - "nextPageToken": "MQ==", - "totalSize": 3 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - w := httptest.NewRecorder() - router.HandleFunc("/v1/projects/{project}/subscriptions", WrapMockAuthConfig(SubListAll, cfgKafka, &brk, str, &mgr, nil, "consumer")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubListAllInvalidPageSize() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions?pageSize=invalid", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 400, - "message": "Invalid page size", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions", WrapMockAuthConfig(SubListAll, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubListAllInvalidPageToken() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions?pageToken=invalid", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 
400, - "message": "Invalid page token", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions", WrapMockAuthConfig(SubListAll, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestTopicDelete() { - - req, err := http.NewRequest("DELETE", "http://localhost:8080/v1/projects/ARGO/topics/topic1", nil) - - if err != nil { - log.Fatal(err) - } - - expResp := "" - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - brk.Topics = map[string]string{} - brk.Topics["argo_uuid.topic1"] = "" - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}", WrapMockAuthConfig(TopicDelete, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - // make sure the topic got deleted - suite.Equal(0, len(brk.Topics)) -} -func (suite *HandlerTestSuite) TestSubTimeToOffset() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1?time=2019-06-10T9:38:30.500Z", nil) - - if err != nil { - log.Fatal(err) - } - - expResp := `{"offset":93204}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - brk.TopicTimeIndices = map[string][]brokers.TimeToOffset{} - - brk.TopicTimeIndices["argo_uuid.topic1"] = []brokers.TimeToOffset{ - {Timestamp: time.Date(2019, 6, 11, 0, 0, 0, 0, time.UTC), Offset: 93204}, - {Timestamp: 
time.Date(2019, 6, 12, 0, 0, 0, 0, time.UTC), Offset: 94000}, - } - - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubTimeToOffset, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSubTimeToOffsetOutOfBounds() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1?time=2020-06-10T9:38:30.500Z", nil) - - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 409, - "message": "Timestamp is out of bounds for the subscription's topic/partition", - "status": "CONFLICT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - brk.TopicTimeIndices = map[string][]brokers.TimeToOffset{} - brk.TopicTimeIndices["argo_uuid.topic1"] = []brokers.TimeToOffset{ - {Timestamp: time.Date(2019, 6, 11, 0, 0, 0, 0, time.UTC), Offset: 93204}, - {Timestamp: time.Date(2019, 6, 12, 0, 0, 0, 0, time.UTC), Offset: 94000}, - } - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubTimeToOffset, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(409, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSubDeleteNotFound() { - - req, err := http.NewRequest("DELETE", "http://localhost:8080/v1/projects/ARGO/subscriptions/subFoo", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 404, - "message": "Subscription doesn't exist", - "status": "NOT_FOUND" - } -}` - cfgKafka := 
config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapMockAuthConfig(SubDelete, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(404, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestTopicDeleteNotfound() { - - req, err := http.NewRequest("DELETE", "http://localhost:8080/v1/projects/ARGO/topics/topicFoo", nil) - - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 404, - "message": "Topic doesn't exist", - "status": "NOT_FOUND" - } -}` - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}", WrapMockAuthConfig(TopicDelete, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(404, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestTopicCreate() { - - req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/topics/topicNew", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "name": "/projects/ARGO/topics/topicNew" -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - - router.HandleFunc("/v1/projects/{project}/topics/{topic}", WrapMockAuthConfig(TopicCreate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - 
suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestTopicCreateExists() { - - req, err := http.NewRequest("PUT", "http://localhost:8080/v1/projects/ARGO/topics/topic1", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 409, - "message": "Topic already exists", - "status": "ALREADY_EXISTS" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}", WrapMockAuthConfig(TopicCreate, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(409, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestTopicListOne() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics/topic1", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "name": "/projects/ARGO/topics/topic1" -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}", WrapMockAuthConfig(TopicListOne, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestTopicListSubscriptions() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics/topic1/subscriptions", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{"subscriptions":["/projects/ARGO/subscriptions/sub1"]}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := 
stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}/subscriptions", WrapMockAuthConfig(ListSubsByTopic, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestTopicListSubscriptionsEmpty() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics/topic1/subscriptions", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{"subscriptions":[]}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - str.SubList = nil - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}/subscriptions", WrapMockAuthConfig(ListSubsByTopic, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestProjectMessageCount() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/metrics/daily-message-average?start_date=2018-10-01&end_date=2018-10-04", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "projects": [ - { - "project": "ARGO", - "message_count": 30, - "average_daily_messages": 10 - } - ], - "total_message_count": 30, - "average_daily_messages": 10 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/metrics/daily-message-average", WrapMockAuthConfig(DailyMessageAverage, cfgKafka, &brk, str, &mgr, nil)) - 
router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestProjectMessageCountErrors() { - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects-message-count", WrapMockAuthConfig(DailyMessageAverage, cfgKafka, &brk, str, &mgr, nil)) - - // wrong start date - expResp1 := `{ - "error": { - "code": 400, - "message": "Start date is not in valid format", - "status": "INVALID_ARGUMENT" - } -}` - req1, err := http.NewRequest("GET", "http://localhost:8080/v1/projects-message-count?start_date=ffff", nil) - if err != nil { - log.Fatal(err) - } - router.ServeHTTP(w, req1) - suite.Equal(400, w.Code) - suite.Equal(expResp1, w.Body.String()) - w.Body.Reset() - - // wrong end date - expResp2 := `{ - "error": { - "code": 400, - "message": "End date is not in valid format", - "status": "INVALID_ARGUMENT" - } -}` - req2, err := http.NewRequest("GET", "http://localhost:8080/v1/projects-message-count?end_date=ffff", nil) - if err != nil { - log.Fatal(err) - } - router.ServeHTTP(w, req2) - suite.Equal(400, w.Code) - suite.Equal(expResp2, w.Body.String()) - w.Body.Reset() - - // one of the projects doesn't exist end date - expResp3 := `{ - "error": { - "code": 404, - "message": "Project ffff doesn't exist", - "status": "NOT_FOUND" - } -}` - req3, err := http.NewRequest("GET", "http://localhost:8080/v1/projects-message-count?projects=ARGO,ffff", nil) - if err != nil { - log.Fatal(err) - } - router.ServeHTTP(w, req3) - suite.Equal(400, w.Code) - suite.Equal(expResp3, w.Body.String()) - w.Body.Reset() - - // start date is off - expResp4 := `{ - "error": { - "code": 400, - "message": "Start date cannot be after the end date", - "status": "INVALID_ARGUMENT" - } -}` - req4, err := 
http.NewRequest("GET", "http://localhost:8080/v1/projects-message-count?start_date=2019-04-04&end_date=2018-01-01", nil) - if err != nil { - log.Fatal(err) - } - router.ServeHTTP(w, req4) - suite.Equal(400, w.Code) - suite.Equal(expResp4, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSubMetrics() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1:metrics", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "metrics": [ - { - "metric": "subscription.number_of_messages", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "subscription", - "resource_name": "sub1", - "timeseries": [ - { - "timestamp": "{{TS1}}", - "value": 0 - } - ], - "description": "Counter that displays the number of messages consumed from the specific subscription" - }, - { - "metric": "subscription.number_of_bytes", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "subscription", - "resource_name": "sub1", - "timeseries": [ - { - "timestamp": "{{TS2}}", - "value": 0 - } - ], - "description": "Counter that displays the total size of data (in bytes) consumed from the specific subscription" - }, - { - "metric": "subscription.consumption_rate", - "metric_type": "rate", - "value_type": "float64", - "resource_type": "subscription", - "resource_name": "sub1", - "timeseries": [ - { - "timestamp": "2019-05-06T00:00:00Z", - "value": 10 - } - ], - "description": "A rate that displays how many messages were consumed per second between the last two consume events" - } - ] -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:metrics", WrapMockAuthConfig(SubMetrics, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, 
req) - suite.Equal(200, w.Code) - - metricOut, _ := metrics.GetMetricsFromJSON([]byte(w.Body.String())) - ts1 := metricOut.Metrics[0].Timeseries[0].Timestamp - ts2 := metricOut.Metrics[1].Timeseries[0].Timestamp - expResp = strings.Replace(expResp, "{{TS1}}", ts1, -1) - expResp = strings.Replace(expResp, "{{TS2}}", ts2, -1) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubMetricsNotFound() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions/unknown_sub:metrics", nil) - if err != nil { - log.Fatal(err) - } - - expRes := `{ - "error": { - "code": 404, - "message": "Subscription doesn't exist", - "status": "NOT_FOUND" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - // temporarily disable auth for this test case - cfgKafka.ResAuth = false - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:metrics", WrapMockAuthConfig(SubMetrics, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(404, w.Code) - suite.Equal(expRes, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestProjectMetrics() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO:metrics", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "metrics": [ - { - "metric": "project.number_of_topics", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "project", - "resource_name": "ARGO", - "timeseries": [ - { - "timestamp": "{{TS1}}", - "value": 4 - } - ], - "description": "Counter that displays the number of topics belonging to the specific project" - }, - { - "metric": "project.number_of_subscriptions", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "project", - "resource_name": "ARGO", 
- "timeseries": [ - { - "timestamp": "{{TS2}}", - "value": 4 - } - ], - "description": "Counter that displays the number of subscriptions belonging to the specific project" - }, - { - "metric": "project.user.number_of_topics", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "project.user", - "resource_name": "ARGO.UserA", - "timeseries": [ - { - "timestamp": "{{TS3}}", - "value": 2 - } - ], - "description": "Counter that displays the number of topics that a user has access to the specific project" - }, - { - "metric": "project.user.number_of_topics", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "project.user", - "resource_name": "ARGO.UserB", - "timeseries": [ - { - "timestamp": "{{TS4}}", - "value": 2 - } - ], - "description": "Counter that displays the number of topics that a user has access to the specific project" - }, - { - "metric": "project.user.number_of_topics", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "project.user", - "resource_name": "ARGO.UserX", - "timeseries": [ - { - "timestamp": "{{TS5}}", - "value": 1 - } - ], - "description": "Counter that displays the number of topics that a user has access to the specific project" - }, - { - "metric": "project.user.number_of_topics", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "project.user", - "resource_name": "ARGO.UserZ", - "timeseries": [ - { - "timestamp": "{{TS6}}", - "value": 1 - } - ], - "description": "Counter that displays the number of topics that a user has access to the specific project" - }, - { - "metric": "project.user.number_of_subscriptions", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "project.user", - "resource_name": "ARGO.UserA", - "timeseries": [ - { - "timestamp": "{{TS7}}", - "value": 3 - } - ], - "description": "Counter that displays the number of subscriptions that a user has access to the specific project" - }, - { - "metric": 
"project.user.number_of_subscriptions", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "project.user", - "resource_name": "ARGO.UserB", - "timeseries": [ - { - "timestamp": "{{TS8}}", - "value": 3 - } - ], - "description": "Counter that displays the number of subscriptions that a user has access to the specific project" - }, - { - "metric": "project.user.number_of_subscriptions", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "project.user", - "resource_name": "ARGO.UserX", - "timeseries": [ - { - "timestamp": "{{TS9}}", - "value": 1 - } - ], - "description": "Counter that displays the number of subscriptions that a user has access to the specific project" - }, - { - "metric": "project.user.number_of_subscriptions", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "project.user", - "resource_name": "ARGO.UserZ", - "timeseries": [ - { - "timestamp": "{{TS10}}", - "value": 2 - } - ], - "description": "Counter that displays the number of subscriptions that a user has access to the specific project" - }, - { - "metric": "project.number_of_daily_messages", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "project", - "resource_name": "ARGO", - "timeseries": [ - { - "timestamp": "{{TS11}}", - "value": 30 - }, - { - "timestamp": "{{TS12}}", - "value": 110 - } - ], - "description": "A collection of counters that represents the total number of messages published each day to all of the project's topics" - } - ] -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}:metrics", WrapMockAuthConfig(ProjectMetrics, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - metricOut, _ := 
metrics.GetMetricsFromJSON([]byte(w.Body.String())) - ts1 := metricOut.Metrics[0].Timeseries[0].Timestamp - ts2 := metricOut.Metrics[1].Timeseries[0].Timestamp - ts3 := metricOut.Metrics[2].Timeseries[0].Timestamp - ts4 := metricOut.Metrics[3].Timeseries[0].Timestamp - ts5 := metricOut.Metrics[4].Timeseries[0].Timestamp - ts6 := metricOut.Metrics[5].Timeseries[0].Timestamp - ts7 := metricOut.Metrics[6].Timeseries[0].Timestamp - ts8 := metricOut.Metrics[7].Timeseries[0].Timestamp - ts9 := metricOut.Metrics[8].Timeseries[0].Timestamp - ts10 := metricOut.Metrics[9].Timeseries[0].Timestamp - ts11 := metricOut.Metrics[10].Timeseries[0].Timestamp - ts12 := metricOut.Metrics[10].Timeseries[1].Timestamp - expResp = strings.Replace(expResp, "{{TS1}}", ts1, -1) - expResp = strings.Replace(expResp, "{{TS2}}", ts2, -1) - expResp = strings.Replace(expResp, "{{TS3}}", ts3, -1) - expResp = strings.Replace(expResp, "{{TS4}}", ts4, -1) - expResp = strings.Replace(expResp, "{{TS5}}", ts5, -1) - expResp = strings.Replace(expResp, "{{TS6}}", ts6, -1) - expResp = strings.Replace(expResp, "{{TS7}}", ts7, -1) - expResp = strings.Replace(expResp, "{{TS8}}", ts8, -1) - expResp = strings.Replace(expResp, "{{TS9}}", ts9, -1) - expResp = strings.Replace(expResp, "{{TS10}}", ts10, -1) - expResp = strings.Replace(expResp, "{{TS11}}", ts11, -1) - expResp = strings.Replace(expResp, "{{TS12}}", ts12, -1) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestOpMetrics() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/metrics", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "metrics": [ - { - "metric": "ams_node.cpu_usage", - "metric_type": "percentage", - "value_type": "float64", - "resource_type": "ams_node", - "resource_name": "{{HOST}}", - "timeseries": [ - { - "timestamp": "{{TS1}}", - "value": {{VAL1}} - } - ], - "description": "Percentage value that displays the CPU usage of ams service in the specific node" - }, - { - "metric": 
"ams_node.memory_usage", - "metric_type": "percentage", - "value_type": "float64", - "resource_type": "ams_node", - "resource_name": "{{HOST}}", - "timeseries": [ - { - "timestamp": "{{TS1}}", - "value": {{VAL2}} - } - ], - "description": "Percentage value that displays the Memory usage of ams service in the specific node" - } - ] -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/metrics", WrapMockAuthConfig(OpMetrics, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - metricOut, _ := metrics.GetMetricsFromJSON([]byte(w.Body.String())) - ts1 := metricOut.Metrics[0].Timeseries[0].Timestamp - val1 := metricOut.Metrics[0].Timeseries[0].Value.(float64) - ts2 := metricOut.Metrics[1].Timeseries[0].Timestamp - val2 := metricOut.Metrics[1].Timeseries[0].Value.(float64) - host := metricOut.Metrics[0].Resource - expResp = strings.Replace(expResp, "{{TS1}}", ts1, -1) - expResp = strings.Replace(expResp, "{{TS2}}", ts2, -1) - expResp = strings.Replace(expResp, "{{VAL1}}", strconv.FormatFloat(val1, 'g', 1, 64), -1) - expResp = strings.Replace(expResp, "{{VAL2}}", strconv.FormatFloat(val2, 'g', 1, 64), -1) - expResp = strings.Replace(expResp, "{{HOST}}", host, -1) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestTopicMetrics() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics/topic1:metrics", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "metrics": [ - { - "metric": "topic.number_of_subscriptions", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "topic", - "resource_name": "topic1", - "timeseries": [ - { - "timestamp": "{{TIMESTAMP1}}", - "value": 1 - } - ], - "description": "Counter that displays 
the number of subscriptions belonging to a specific topic" - }, - { - "metric": "topic.number_of_messages", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "topic", - "resource_name": "topic1", - "timeseries": [ - { - "timestamp": "{{TIMESTAMP2}}", - "value": 0 - } - ], - "description": "Counter that displays the number of messages published to the specific topic" - }, - { - "metric": "topic.number_of_bytes", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "topic", - "resource_name": "topic1", - "timeseries": [ - { - "timestamp": "{{TIMESTAMP3}}", - "value": 0 - } - ], - "description": "Counter that displays the total size of data (in bytes) published to the specific topic" - }, - { - "metric": "topic.number_of_daily_messages", - "metric_type": "counter", - "value_type": "int64", - "resource_type": "topic", - "resource_name": "topic1", - "timeseries": [ - { - "timestamp": "{{TIMESTAMP4}}", - "value": 30 - }, - { - "timestamp": "{{TIMESTAMP5}}", - "value": 40 - } - ], - "description": "A collection of counters that represents the total number of messages published each day to a specific topic" - }, - { - "metric": "topic.publishing_rate", - "metric_type": "rate", - "value_type": "float64", - "resource_type": "topic", - "resource_name": "topic1", - "timeseries": [ - { - "timestamp": "2019-05-06T00:00:00Z", - "value": 10 - } - ], - "description": "A rate that displays how many messages were published per second between the last two publish events" - } - ] -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}:metrics", WrapMockAuthConfig(TopicMetrics, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - metricOut, _ := 
metrics.GetMetricsFromJSON([]byte(w.Body.String())) - ts1 := metricOut.Metrics[0].Timeseries[0].Timestamp - ts2 := metricOut.Metrics[1].Timeseries[0].Timestamp - ts3 := metricOut.Metrics[2].Timeseries[0].Timestamp - ts4 := metricOut.Metrics[3].Timeseries[0].Timestamp - ts5 := metricOut.Metrics[3].Timeseries[1].Timestamp - expResp = strings.Replace(expResp, "{{TIMESTAMP1}}", ts1, -1) - expResp = strings.Replace(expResp, "{{TIMESTAMP2}}", ts2, -1) - expResp = strings.Replace(expResp, "{{TIMESTAMP3}}", ts3, -1) - expResp = strings.Replace(expResp, "{{TIMESTAMP4}}", ts4, -1) - expResp = strings.Replace(expResp, "{{TIMESTAMP5}}", ts5, -1) - - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestTopicMetricsNotFound() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics/topic_not_found:metrics", nil) - if err != nil { - log.Fatal(err) - } - - expRes := `{ - "error": { - "code": 404, - "message": "Topic doesn't exist", - "status": "NOT_FOUND" - } -}` - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - // deactivate auth for this specific test case - cfgKafka.ResAuth = false - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}:metrics", WrapMockAuthConfig(TopicMetrics, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(404, w.Code) - suite.Equal(expRes, w.Body.String()) - -} -func (suite *HandlerTestSuite) TestTopicACL01() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics/topic1:acl", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "authorized_users": [ - "UserA", - "UserB" - ] -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", 
"argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}:acl", WrapMockAuthConfig(TopicACL, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestTopicACL02() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics/topic3:acl", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "authorized_users": [ - "UserX" - ] -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}:acl", WrapMockAuthConfig(TopicACL, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestModTopicACLWrong() { - - postExp := `{"authorized_users":["UserX","UserFoo"]}` - - expRes := `{ - "error": { - "code": 404, - "message": "User(s): UserFoo do not exist", - "status": "NOT_FOUND" - } -}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/topics/topic1:modAcl", bytes.NewBuffer([]byte(postExp))) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}:modAcl", WrapMockAuthConfig(TopicModACL, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(404, w.Code) - suite.Equal(expRes, w.Body.String()) - -} - -func 
(suite *HandlerTestSuite) TestModSubACLWrong() { - - postExp := `{"authorized_users":["UserX","UserFoo"]}` - - expRes := `{ - "error": { - "code": 404, - "message": "User(s): UserFoo do not exist", - "status": "NOT_FOUND" - } -}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub101:modAcl", bytes.NewBuffer([]byte(postExp))) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modAcl", WrapMockAuthConfig(SubModACL, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(404, w.Code) - suite.Equal(expRes, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestModTopicACL01() { - - postExp := `{"authorized_users":["UserX","UserZ"]}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/topics/topic1:modAcl", bytes.NewBuffer([]byte(postExp))) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}:modAcl", WrapMockAuthConfig(TopicModACL, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal("", w.Body.String()) - - req2, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics/topic1:acl", nil) - if err != nil { - log.Fatal(err) - } - router.HandleFunc("/v1/projects/{project}/topics/{topic}:acl", WrapMockAuthConfig(TopicACL, cfgKafka, &brk, str, &mgr, nil)) - w2 := httptest.NewRecorder() - 
router.ServeHTTP(w2, req2) - suite.Equal(200, w2.Code) - - expResp := `{ - "authorized_users": [ - "UserX", - "UserZ" - ] -}` - - suite.Equal(expResp, w2.Body.String()) - -} - -func (suite *HandlerTestSuite) TestModSubACL01() { - - postExp := `{"authorized_users":["UserX","UserZ"]}` - - req, err := http.NewRequest("POST", "http://localhost:8080/v1/projects/ARGO/subscription/sub1:modAcl", bytes.NewBuffer([]byte(postExp))) - if err != nil { - log.Fatal(err) - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscription/{subscription}:modAcl", WrapMockAuthConfig(SubModACL, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal("", w.Body.String()) - - req2, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscription/sub1:acl", nil) - if err != nil { - log.Fatal(err) - } - router.HandleFunc("/v1/projects/{project}/subscription/{subscription}:acl", WrapMockAuthConfig(SubACL, cfgKafka, &brk, str, &mgr, nil)) - w2 := httptest.NewRecorder() - router.ServeHTTP(w2, req2) - suite.Equal(200, w2.Code) - - expResp := `{ - "authorized_users": [ - "UserX", - "UserZ" - ] -}` - - suite.Equal(expResp, w2.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubACL01() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscription/sub1:acl", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "authorized_users": [ - "UserA", - "UserB" - ] -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - 
router.HandleFunc("/v1/projects/{project}/subscription/{subscription}:acl", WrapMockAuthConfig(SubACL, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubACL02() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/subscriptions/sub3:acl", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "authorized_users": [ - "UserZ", - "UserB", - "UserA" - ] -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:acl", WrapMockAuthConfig(SubACL, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestTopicListAll() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "topics": [ - { - "name": "/projects/ARGO/topics/topic4" - }, - { - "name": "/projects/ARGO/topics/topic3", - "schema": "projects/ARGO/schemas/schema-3" - }, - { - "name": "/projects/ARGO/topics/topic2", - "schema": "projects/ARGO/schemas/schema-1" - }, - { - "name": "/projects/ARGO/topics/topic1" - } - ], - "nextPageToken": "", - "totalSize": 4 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics", WrapMockAuthConfig(TopicListAll, cfgKafka, &brk, str, &mgr, nil, "project_admin")) - router.ServeHTTP(w, req) - 
suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestTopicListAllPublisher() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "topics": [ - { - "name": "/projects/ARGO/topics/topic2", - "schema": "projects/ARGO/schemas/schema-1" - }, - { - "name": "/projects/ARGO/topics/topic1" - } - ], - "nextPageToken": "", - "totalSize": 2 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics", WrapMockAuthConfig(TopicListAll, cfgKafka, &brk, str, &mgr, nil, "publisher")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestTopicListAllPublisherWithPagination() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics?pageSize=1", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "topics": [ - { - "name": "/projects/ARGO/topics/topic2", - "schema": "projects/ARGO/schemas/schema-1" - } - ], - "nextPageToken": "MA==", - "totalSize": 2 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics", WrapMockAuthConfig(TopicListAll, cfgKafka, &brk, str, &mgr, nil, "publisher")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestPublishWithSchema() { - - type td struct { - topic string - postBody string - expectedResponse 
string - expectedStatusCode int - msg string - } - - testData := []td{ - { - topic: "topic2", - postBody: `{ - "messages" : [ - - { - "attributes": {}, - "data": "eyJuYW1lIjoibmFtZS0xIiwgImVtYWlsIjogInRlc3RAZXhhbXBsZS5jb20ifQ==" - }, - - { - "attributes": {}, - "data": "eyJuYW1lIjoibmFtZS0xIiwgImVtYWlsIjogInRlc3RAZXhhbXBsZS5jb20iLCAiYWRkcmVzcyI6IlN0cmVldCAxMyIsInRlbGVwaG9uZSI6IjY5NDg1Njc4ODkifQ==" - } - ] -}`, - expectedStatusCode: 200, - expectedResponse: `{ - "messageIds": [ - "1", - "2" - ] -}`, - msg: "Case where the messages are validated successfully(JSON)", - }, - { - topic: "topic3", - postBody: `{ - "messages" : [ - - { - "attributes": {}, - "data": "DGFnZWxvc8T8Cg==" - }, - { - "attributes": {}, - "data": "DGFnZWxvc8T8Cg==" - } - ] -}`, - expectedStatusCode: 200, - expectedResponse: `{ - "messageIds": [ - "3", - "4" - ] -}`, - msg: "Case where the messages are validated successfully(AVRO)", - }, - { - topic: "topic2", - postBody: `{ - "messages" : [ - - { - "attributes": {}, - "data": "eyJuYW1lIjoibmFtZS0xIiwiYWRkcmVzcyI6IlN0cmVldCAxMyIsInRlbGVwaG9uZSI6Njk0ODU2Nzg4OX0=" - }, - - { - "attributes": {}, - "data": "eyJuYW1lIjoibmFtZS0xIiwgImVtYWlsIjogInRlc3RAZXhhbXBsZS5jb20iLCAiYWRkcmVzcyI6IlN0cmVldCAxMyIsInRlbGVwaG9uZSI6IjY5NDg1Njc4ODkifQ==" - } - ] -}`, - expectedStatusCode: 400, - expectedResponse: `{ - "error": { - "code": 400, - "message": "Message 0 data is not valid.1)(root): email is required.2)telephone: Invalid type. 
Expected: string, given: integer.", - "status": "INVALID_ARGUMENT" - } -}`, - msg: "Case where one of the messages is not successfully validated(2 errors)", - }, - { - topic: "topic3", - postBody: `{ - "messages" : [ - - { - "attributes": {}, - "data": "T2JqAQQWYXZyby5zY2hlbWGYAnsidHlwZSI6InJlY29yZCIsIm5hbWUiOiJQbGFjZSIsIm5hbWVzcGFjZSI6InBsYWNlLmF2cm8iLCJmaWVsZHMiOlt7Im5hbWUiOiJwbGFjZW5hbWUiLCJ0eXBlIjoic3RyaW5nIn0seyJuYW1lIjoiYWRkcmVzcyIsInR5cGUiOiJzdHJpbmcifV19FGF2cm8uY29kZWMIbnVsbABM1P4b0GpYaCg9tqxa+YDZAiQSc3RyZWV0IDIyDnBsYWNlIGFM1P4b0GpYaCg9tqxa+YDZ" - }, - - { - "attributes": {}, - "data": "DGFnZWxvc8T8Cg==" - } - ] -}`, - expectedStatusCode: 400, - expectedResponse: `{ - "error": { - "code": 400, - "message": "Message 0 is not valid.cannot decode binary record \"user.avro.User\" field \"username\": cannot decode binary string: cannot decode binary bytes: negative size: -40", - "status": "INVALID_ARGUMENT" - } -}`, - msg: "Case where one of the messages is not successfully validated(1 error)(AVRO)", - }, - - { - topic: "topic2", - postBody: `{ - "messages" : [ - - { - "attributes": {}, - "data": "eyJuYW1lIjoibmFtZS0xIiwiYWRkcmVzcyI6IlN0cmVldCAxMyIsInRlbGVwaG9uZSI6IjY5NDg1Njc4ODkifQo=" - }, - - { - "attributes": {}, - "data": "eyJuYW1lIjoibmFtZS0xIiwgImVtYWlsIjogInRlc3RAZXhhbXBsZS5jb20iLCAiYWRkcmVzcyI6IlN0cmVldCAxMyIsInRlbGVwaG9uZSI6IjY5NDg1Njc4ODkifQ==" - } - ] -}`, - expectedStatusCode: 400, - expectedResponse: `{ - "error": { - "code": 400, - "message": "Message 0 data is not valid,(root): email is required", - "status": "INVALID_ARGUMENT" - } -}`, - msg: "Case where the one of the messages is not successfully validated(1 error)", - }, - { - topic: "topic2", - postBody: `{ - "messages" : [ - - { - "attributes": {}, - "data": "eyJuYW1lIjoibmFtZS0xIiwgImVtYWlsIjogInRlc3RAZXhhbXBsZS5jb20iLCAiYWRkcmVzcyI6IlN0cmVldCAxMyIsInRlbGVwaG9uZSI6IjY5NDg1Njc4ODkifQ==" - }, - - { - "attributes": {}, - "data": 
"eyJuYW1lIjoibmFtZS0xIiwiYWRkcmVzcyI6IlN0cmVldCAxMyIsInRlbGVwaG9uZSI6IjY5NDg1Njc4ODkiCg==" - } - ] -}`, - expectedStatusCode: 400, - expectedResponse: `{ - "error": { - "code": 400, - "message": "Message 1 data is not valid JSON format,unexpected EOF", - "status": "INVALID_ARGUMENT" - } -}`, - msg: "Case where the one of the messages is not in valid json format", - }, - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - cfgKafka.ResAuth = false - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - - for _, t := range testData { - - w := httptest.NewRecorder() - url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/topics/%v", t.topic) - req, err := http.NewRequest("POST", url, strings.NewReader(t.postBody)) - if err != nil { - log.Fatal(err) - } - router.HandleFunc("/v1/projects/{project}/topics/{topic}", WrapMockAuthConfig(TopicPublish, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - - suite.Equal(t.expectedStatusCode, w.Code, t.msg) - suite.Equal(t.expectedResponse, w.Body.String(), t.msg) - } -} - -func (suite *HandlerTestSuite) TestTopicListAllFirstPage() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics?pageSize=2", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "topics": [ - { - "name": "/projects/ARGO/topics/topic4" - }, - { - "name": "/projects/ARGO/topics/topic3", - "schema": "projects/ARGO/schemas/schema-3" - } - ], - "nextPageToken": "MQ==", - "totalSize": 4 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - 
router.HandleFunc("/v1/projects/{project}/topics", WrapMockAuthConfig(TopicListAll, cfgKafka, &brk, str, &mgr, nil, "project_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestTopicListAllNextPage() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics?pageSize=2&pageToken=MA==", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "topics": [ - { - "name": "/projects/ARGO/topics/topic1" - } - ], - "nextPageToken": "", - "totalSize": 4 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics", WrapMockAuthConfig(TopicListAll, cfgKafka, &brk, str, &mgr, nil, "project_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestTopicListAllEmpty() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics?pageSize=2", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "topics": [], - "nextPageToken": "", - "totalSize": 0 -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - // empty the store - str.TopicList = []stores.QTopic{} - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics", WrapMockAuthConfig(TopicListAll, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestTopicListAllInvalidPageSize() { - - req, err := http.NewRequest("GET", 
"http://localhost:8080/v1/projects/ARGO/topics?pageSize=invalid", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 400, - "message": "Invalid page size", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics", WrapMockAuthConfig(TopicListAll, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestTopicListAllInvalidPageToken() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/projects/ARGO/topics?pageToken=invalid", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "error": { - "code": 400, - "message": "Invalid page token", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics", WrapMockAuthConfig(TopicListAll, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expResp, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestPublish() { - - postJSON := `{ - "messages": [ - { - "attributes": - { - "foo":"bar" - } - , - "data": "YmFzZTY0ZW5jb2RlZA==" - } - ] -}` - url := "http://localhost:8080/v1/projects/ARGO/topics/topic1:publish" - req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "messageIds": [ - "1" - ] -}` - tn := time.Now().UTC() - - cfgKafka := 
config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}:publish", WrapMockAuthConfig(TopicPublish, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expJSON, w.Body.String()) - tpc, _, _, _ := str.QueryTopics("argo_uuid", "", "topic1", "", 0) - suite.True(tn.Before(tpc[0].LatestPublish)) - suite.NotEqual(tpc[0].PublishRate, 10) - -} - -func (suite *HandlerTestSuite) TestPublishMultiple() { - - postJSON := `{ - "messages": [ - { - "attributes": - { - "foo":"bar" - } - , - "data": "YmFzZTY0ZW5jb2RlZA==" - }, - { - "attributes": - { - "foo2":"bar2" - } - , - "data": "YmFzZTY0ZW5jb2RlZA==" - }, - { - "attributes": - { - "foo2":"bar2" - } - , - "data": "YmFzZTY0ZW5jb2RlZA==" - } - ] -}` - url := "http://localhost:8080/v1/projects/ARGO/topics/topic1:publish" - req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "messageIds": [ - "1", - "2", - "3" - ] -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - brk.Initialize([]string{"localhost"}) - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}:publish", WrapMockAuthConfig(TopicPublish, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expJSON, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestPublishError() { - - postJSON := `{ - "messages": [ - { - "attributes": [ - { - "key": "foo", - "valu2RlZA==" - }, - { - "attributes": [ - { - "key": "foo2", - "value": "bar2" - } - ], - 
"data": "YmFzZTY0ZW5jb2RlZA==" - } - ] -}` - url := "http://localhost:8080/v1/projects/ARGO/topics/topic1:publish" - req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "error": { - "code": 400, - "message": "Invalid Message Arguments", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}:publish", WrapMockAuthConfig(TopicPublish, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expJSON, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestPublishNoTopic() { - - postJSON := `{ - "messages": [ - { - "attributes": [ - { - "key": "foo", - "value": "bar" - } - ], - "data": "YmFzZTY0ZW5jb2RlZA==" - }, - { - "attributes": [ - { - "key": "foo2", - "value": "bar2" - } - ], - "data": "YmFzZTY0ZW5jb2RlZA==" - } - ] -}` - url := "http://localhost:8080/v1/projects/ARGO/topics/FOO:publish" - req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "error": { - "code": 404, - "message": "Topic doesn't exist", - "status": "NOT_FOUND" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}:publish", WrapMockAuthConfig(TopicPublish, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(404, w.Code) - suite.Equal(expJSON, w.Body.String()) - -} - -func (suite *HandlerTestSuite) 
TestSubPullOne() { - - postJSON := `{ - "maxMessages":"1" -}` - url := "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1:pull" - req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "receivedMessages": [ - { - "ackId": "projects/ARGO/subscriptions/sub1:0", - "message": { - "messageId": "0", - "attributes": { - "foo": "bar" - }, - "data": "YmFzZTY0ZW5jb2RlZA==", - "publishTime": "2016-02-24T11:55:09.786127994Z" - } - } - ] -}` - tn := time.Now().UTC() - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - brk.Initialize([]string{"localhost"}) - brk.PopulateThree() // Add three messages to the broker queue - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:pull", WrapMockAuthConfig(SubPull, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expJSON, w.Body.String()) - spc, _, _, _ := str.QuerySubs("argo_uuid", "", "sub1", "", 0) - suite.True(tn.Before(spc[0].LatestConsume)) - suite.NotEqual(spc[0].ConsumeRate, 10) - -} - -func (suite *HandlerTestSuite) TestSubPullFromPushEnabledAsPushWorker() { - - postJSON := `{ - "maxMessages":"1" -}` - url := "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:pull" - req, err := http.NewRequest("POST", url, strings.NewReader(postJSON)) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "receivedMessages": [ - { - "ackId": "projects/ARGO/subscriptions/sub4:0", - "message": { - "messageId": "0", - "attributes": { - "foo": "bar" - }, - "data": "YmFzZTY0ZW5jb2RlZA==", - "publishTime": "2016-02-24T11:55:09.786127994Z" - } - } - ] -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - 
brk.Initialize([]string{"localhost"}) - brk.PopulateThree() // Add three messages to the broker queue - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:pull", WrapMockAuthConfig(SubPull, cfgKafka, &brk, str, &mgr, nil, "push_worker")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expJSON, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSubPullFromPushEnabledAsPushWorkerDISABLED() { - - postJSON := `{ - "maxMessages":"1" -}` - url := "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:pull" - req, err := http.NewRequest("POST", url, strings.NewReader(postJSON)) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "error": { - "code": 409, - "message": "Push functionality is currently disabled", - "status": "CONFLICT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - // disable push functionality - cfgKafka.PushEnabled = false - brk := brokers.MockBroker{} - brk.Initialize([]string{"localhost"}) - brk.PopulateThree() // Add three messages to the broker queue - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:pull", WrapMockAuthConfig(SubPull, cfgKafka, &brk, str, &mgr, nil, "push_worker")) - router.ServeHTTP(w, req) - suite.Equal(409, w.Code) - suite.Equal(expJSON, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSubPullFromPushEnabledAsServiceAdmin() { - - postJSON := `{ - "maxMessages":"1" -}` - url := "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:pull" - req, err := http.NewRequest("POST", url, strings.NewReader(postJSON)) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "receivedMessages": [ - { - "ackId": 
"projects/ARGO/subscriptions/sub4:0", - "message": { - "messageId": "0", - "attributes": { - "foo": "bar" - }, - "data": "YmFzZTY0ZW5jb2RlZA==", - "publishTime": "2016-02-24T11:55:09.786127994Z" - } - } - ] -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - brk.Initialize([]string{"localhost"}) - brk.PopulateThree() // Add three messages to the broker queue - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:pull", WrapMockAuthConfig(SubPull, cfgKafka, &brk, str, &mgr, nil, "service_admin")) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expJSON, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSubPullFromPushEnabledNoPushWorker() { - - postJSON := `{ - "maxMessages":"1" -}` - url := "http://localhost:8080/v1/projects/ARGO/subscriptions/sub4:pull" - req, err := http.NewRequest("POST", url, strings.NewReader(postJSON)) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "error": { - "code": 403, - "message": "Access to this resource is forbidden", - "status": "FORBIDDEN" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - brk.Initialize([]string{"localhost"}) - brk.PopulateThree() // Add three messages to the broker queue - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:pull", WrapMockAuthConfig(SubPull, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(403, w.Code) - suite.Equal(expJSON, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSubModAck() { - - postJSON := `{ - "ackDeadlineSeconds":33 -}` - - postJSON2 := `{ - 
"ackDeadlineSeconds":700 -}` - - postJSON3 := `{ - "ackDeadlineSeconds":-22 -}` - - url := "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1:modifyAckDeadline" - req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON1 := `` - - expJSON2 := `{ - "error": { - "code": 400, - "message": "Invalid ackDeadlineSeconds(needs value between 0 and 600) Arguments", - "status": "INVALID_ARGUMENT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - brk.Initialize([]string{"localhost"}) - brk.PopulateThree() // Add three messages to the broker queue - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyAckDeadline", WrapMockAuthConfig(SubModAck, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expJSON1, w.Body.String()) - - subRes, err := str.QueryOneSub("argo_uuid", "sub1") - suite.Equal(33, subRes.Ack) - - req2, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON2))) - router2 := mux.NewRouter().StrictSlash(true) - w2 := httptest.NewRecorder() - mgr = oldPush.Manager{} - router2.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyAckDeadline", WrapMockAuthConfig(SubModAck, cfgKafka, &brk, str, &mgr, nil)) - router2.ServeHTTP(w2, req2) - suite.Equal(400, w2.Code) - suite.Equal(expJSON2, w2.Body.String()) - - req3, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON3))) - router3 := mux.NewRouter().StrictSlash(true) - w3 := httptest.NewRecorder() - mgr = oldPush.Manager{} - router3.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:modifyAckDeadline", WrapMockAuthConfig(SubModAck, cfgKafka, &brk, str, &mgr, nil)) - router3.ServeHTTP(w3, req3) - 
suite.Equal(400, w3.Code) - suite.Equal(expJSON2, w3.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubAck() { - - postJSON := `{ - "ackIds":["projects/ARGO/subscriptions/sub2:1"] -}` - - postJSON2 := `{ -"ackIds":["projects/ARGO/subscriptions/sub1:2"] -}` - - postJSON3 := `{ -"ackIds":["projects/ARGO/subscriptions/sub1:2"] -}` - - url := "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1:acknowledge" - req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON1 := `{ - "error": { - "code": 400, - "message": "Invalid ack id", - "status": "INVALID_ARGUMENT" - } -}` - - expJSON2 := `{ - "error": { - "code": 408, - "message": "ack timeout", - "status": "TIMEOUT" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - brk.Initialize([]string{"localhost"}) - brk.PopulateThree() // Add three messages to the broker queue - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:acknowledge", WrapMockAuthConfig(SubAck, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(400, w.Code) - suite.Equal(expJSON1, w.Body.String()) - - // grab sub1 - zSec := "2006-01-02T15:04:05Z" - t := time.Now().UTC() - ts := t.Format(zSec) - str.SubList[0].PendingAck = ts - str.SubList[0].NextOffset = 3 - - req2, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON2))) - router2 := mux.NewRouter().StrictSlash(true) - w2 := httptest.NewRecorder() - mgr = oldPush.Manager{} - router2.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:acknowledge", WrapMockAuthConfig(SubAck, cfgKafka, &brk, str, &mgr, nil)) - router2.ServeHTTP(w2, req2) - suite.Equal(200, w2.Code) - suite.Equal("{}", w2.Body.String()) - - // mess with the timeout - t2 := 
time.Now().UTC().Add(-11 * time.Second) - ts2 := t2.Format(zSec) - str.SubList[0].PendingAck = ts2 - str.SubList[0].NextOffset = 4 - - req3, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON3))) - router3 := mux.NewRouter().StrictSlash(true) - w3 := httptest.NewRecorder() - mgr = oldPush.Manager{} - router3.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:acknowledge", WrapMockAuthConfig(SubAck, cfgKafka, &brk, str, &mgr, nil)) - router3.ServeHTTP(w3, req3) - suite.Equal(408, w3.Code) - suite.Equal(expJSON2, w3.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubError() { - - postJSON := `{ - -}` - url := "http://localhost:8080/v1/projects/ARGO/subscriptions/foo:pull" - req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "error": { - "code": 404, - "message": "Subscription doesn't exist", - "status": "NOT_FOUND" - } -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - brk.Initialize([]string{"localhost"}) - brk.PopulateThree() // Add three messages to the broker queue - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:pull", WrapMockAuthConfig(SubPull, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(404, w.Code) - suite.Equal(expJSON, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestSubNoTopic() { - - postJSON := `{ - -}` - url := "http://localhost:8080/v1/projects/ARGO/subscriptions/no_topic_sub:pull" - req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "error": { - "code": 409, - "message": "Subscription's topic doesn't exist", - "status": "CONFLICT" - } -}` - - cfgKafka := config.NewAPICfg() 
- cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - // add a mock sub that is linked to a non existent topic - str.SubList = append(str.SubList, stores.QSub{ - Name: "no_topic_sub", - ProjectUUID: "argo_uuid", - Topic: "unknown_topic"}, - ) - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:pull", WrapMockAuthConfig(SubPull, cfgKafka, &brk, str, &mgr, nil, "project_admin")) - router.ServeHTTP(w, req) - suite.Equal(409, w.Code) - suite.Equal(expJSON, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSubPullAll() { - - postJSON := `{ - -}` - url := "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1:pull" - req, err := http.NewRequest("POST", url, bytes.NewBuffer([]byte(postJSON))) - if err != nil { - log.Fatal(err) - } - - expJSON := `{ - "receivedMessages": [ - { - "ackId": "projects/ARGO/subscriptions/sub1:0", - "message": { - "messageId": "0", - "attributes": { - "foo": "bar" - }, - "data": "YmFzZTY0ZW5jb2RlZA==", - "publishTime": "2016-02-24T11:55:09.786127994Z" - } - }, - { - "ackId": "projects/ARGO/subscriptions/sub1:1", - "message": { - "messageId": "1", - "attributes": { - "foo2": "bar2" - }, - "data": "YmFzZTY0ZW5jb2RlZA==", - "publishTime": "2016-02-24T11:55:09.827678754Z" - } - }, - { - "ackId": "projects/ARGO/subscriptions/sub1:2", - "message": { - "messageId": "2", - "attributes": { - "foo2": "bar2" - }, - "data": "YmFzZTY0ZW5jb2RlZA==", - "publishTime": "2016-02-24T11:55:09.830417467Z" - } - } - ] -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - brk.Initialize([]string{"localhost"}) - brk.PopulateThree() // Add three messages to the broker queue - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - w := httptest.NewRecorder() - mgr := 
oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}:pull", WrapMockAuthConfig(SubPull, cfgKafka, &brk, str, &mgr, nil)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expJSON, w.Body.String()) - -} - -func (suite *HandlerTestSuite) TestValidationInSubs() { - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - brk.Initialize([]string{"localhost"}) - brk.PopulateThree() // Add three messages to the broker queue - str := stores.NewMockStore("whatever", "argo_mgs") - - okResp := `{ - "name": "/projects/ARGO/subscriptions/sub1", - "topic": "/projects/ARGO/topics/topic1", - "pushConfig": { - "pushEndpoint": "", - "maxMessages": 0, - "retryPolicy": {}, - "verification_hash": "", - "verified": false - }, - "ackDeadlineSeconds": 10 -}` - - invProject := `{ - "error": { - "code": 400, - "message": "Invalid project name", - "status": "INVALID_ARGUMENT" - } -}` - - invSub := `{ - "error": { - "code": 400, - "message": "Invalid subscription name", - "status": "INVALID_ARGUMENT" - } -}` - - urls := []string{ - "http://localhost:8080/v1/projects/ARGO/subscriptions/sub1", - "http://localhost:8080/v1/projects/AR:GO/subscriptions/sub1", - "http://localhost:8080/v1/projects/ARGO/subscriptions/s,ub1", - "http://localhost:8080/v1/projects/AR,GO/subscriptions/s:ub1", - } - - codes := []int(nil) - responses := []string(nil) - - for _, url := range urls { - w := httptest.NewRecorder() - req, err := http.NewRequest("GET", url, bytes.NewBuffer([]byte(""))) - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/subscriptions/{subscription}", WrapValidate(WrapMockAuthConfig(SubListOne, cfgKafka, &brk, str, &mgr, nil))) - - if err != nil { - log.Fatal(err) - } - - router.ServeHTTP(w, req) - codes = append(codes, w.Code) - responses = append(responses, w.Body.String()) - - } - - // First request is valid so response is ok - 
suite.Equal(200, codes[0]) - suite.Equal(okResp, responses[0]) - - // Second request has invalid project name - suite.Equal(400, codes[1]) - suite.Equal(invProject, responses[1]) - - // Third request has invalid subscription name - suite.Equal(400, codes[2]) - suite.Equal(invSub, responses[2]) - - // Fourth request has invalid project and subscription name, but project is caught first - suite.Equal(400, codes[3]) - suite.Equal(invProject, responses[3]) - -} - -func (suite *HandlerTestSuite) TestValidationInTopics() { - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - brk := brokers.MockBroker{} - brk.Initialize([]string{"localhost"}) - brk.PopulateThree() // Add three messages to the broker queue - str := stores.NewMockStore("whatever", "argo_mgs") - - okResp := `{ - "name": "/projects/ARGO/topics/topic1" -}` - invProject := `{ - "error": { - "code": 400, - "message": "Invalid project name", - "status": "INVALID_ARGUMENT" - } -}` - - invTopic := `{ - "error": { - "code": 400, - "message": "Invalid topic name", - "status": "INVALID_ARGUMENT" - } -}` - - urls := []string{ - "http://localhost:8080/v1/projects/ARGO/topics/topic1", - "http://localhost:8080/v1/projects/AR:GO/topics/topic1", - "http://localhost:8080/v1/projects/ARGO/topics/top,ic1", - "http://localhost:8080/v1/projects/AR,GO/topics/top:ic1", - } - - codes := []int(nil) - responses := []string(nil) - - for _, url := range urls { - w := httptest.NewRecorder() - req, err := http.NewRequest("GET", url, bytes.NewBuffer([]byte(""))) - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - router.HandleFunc("/v1/projects/{project}/topics/{topic}", WrapValidate(WrapMockAuthConfig(TopicListOne, cfgKafka, &brk, str, &mgr, nil))) - - if err != nil { - log.Fatal(err) - } - - router.ServeHTTP(w, req) - codes = append(codes, w.Code) - responses = append(responses, w.Body.String()) - - } - - // First request is valid so response is ok - suite.Equal(200, codes[0]) - 
suite.Equal(okResp, responses[0]) - - // Second request has invalid project name - suite.Equal(400, codes[1]) - suite.Equal(invProject, responses[1]) - - // Third request has invalid topic name - suite.Equal(400, codes[2]) - suite.Equal(invTopic, responses[2]) - - // Fourth request has invalid project and topic names, but project is caught first - suite.Equal(400, codes[3]) - suite.Equal(invProject, responses[3]) - -} - -func (suite *HandlerTestSuite) TestHealthCheck() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/status", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "status": "ok", - "push_servers": [ - { - "endpoint": "localhost:5555", - "status": "SERVING" - } - ] -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/status", WrapMockAuthConfig(HealthCheck, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestHealthCheckPushDisabled() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/status", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "status": "ok", - "push_functionality": "disabled" -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = false - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/status", WrapMockAuthConfig(HealthCheck, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(200, 
w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestHealthCheckPushWorkerMissing() { - - req, err := http.NewRequest("GET", "http://localhost:8080/v1/status", nil) - if err != nil { - log.Fatal(err) - } - - expResp := `{ - "status": "warning", - "push_servers": [ - { - "endpoint": "localhost:5555", - "status": "SERVING" - } - ] -}` - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - // add a wrong push worker token - cfgKafka.PushWorkerToken = "missing" - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - w := httptest.NewRecorder() - router.HandleFunc("/v1/status", WrapMockAuthConfig(HealthCheck, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - suite.Equal(200, w.Code) - suite.Equal(expResp, w.Body.String()) -} - -func (suite *HandlerTestSuite) TestSchemaCreate() { - - type td struct { - postBody string - expectedResponse string - schemaName string - expectedStatusCode int - msg string - } - - testData := []td{ - { - postBody: `{ - "type": "json", - "schema":{ - "type": "string" - } -}`, - schemaName: "new-schema", - expectedStatusCode: 200, - expectedResponse: `{ - "uuid": "{{UUID}}", - "name": "projects/ARGO/schemas/new-schema", - "type": "json", - "schema": { - "type": "string" - } -}`, - msg: "Case where the schema is valid and successfully created(JSON)", - }, - { - postBody: `{ - "type": "avro", - "schema":{ - "type": "record", - "namespace": "user.avro", - "name":"User", - "fields": [ - {"name": "username", "type": "string"}, - {"name": "phone", "type": "int"} - ] - } -}`, - schemaName: "new-schema-avro", - expectedStatusCode: 200, - expectedResponse: `{ - "uuid": "{{UUID}}", - "name": "projects/ARGO/schemas/new-schema-avro", - "type": "avro", - "schema": { - "fields": [ - { - "name": "username", - "type": "string" - }, - { - 
"name": "phone", - "type": "int" - } - ], - "name": "User", - "namespace": "user.avro", - "type": "record" - } -}`, - msg: "Case where the schema is valid and successfully created(AVRO)", - }, - { - postBody: `{ - "type": "unknown", - "schema":{ - "type": "string" - } -}`, - schemaName: "new-schema-2", - expectedStatusCode: 400, - expectedResponse: `{ - "error": { - "code": 400, - "message": "Schema type can only be 'json' or 'avro'", - "status": "INVALID_ARGUMENT" - } -}`, - msg: "Case where the schema type is unsupported", - }, - { - postBody: `{ - "type": "json", - "schema":{ - "type": "unknown" - } -}`, - schemaName: "new-schema-2", - expectedStatusCode: 400, - expectedResponse: `{ - "error": { - "code": 400, - "message": "has a primitive type that is NOT VALID -- given: /unknown/ Expected valid values are:[array boolean integer number null object string]", - "status": "INVALID_ARGUMENT" - } -}`, - msg: "Case where the json schema is not valid", - }, - { - postBody: `{ - "type": "avro", - "schema":{ - "type": "unknown" - } -}`, - schemaName: "new-schema-2", - expectedStatusCode: 400, - expectedResponse: `{ - "error": { - "code": 400, - "message": "unknown type name: \"unknown\"", - "status": "INVALID_ARGUMENT" - } -}`, - msg: "Case where the avro schema is not valid", - }, - { - postBody: `{ - "type": "json", - "schema":{ - "type": "string" - } -}`, - schemaName: "schema-1", - expectedStatusCode: 409, - expectedResponse: `{ - "error": { - "code": 409, - "message": "Schema already exists", - "status": "ALREADY_EXISTS" - } -}`, - msg: "Case where the json schema name already exists", - }, - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - - for _, t := range testData { - - w := 
httptest.NewRecorder() - url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/schemas/%v", t.schemaName) - req, err := http.NewRequest("POST", url, strings.NewReader(t.postBody)) - if err != nil { - log.Fatal(err) - } - router.HandleFunc("/v1/projects/{project}/schemas/{schema}", WrapMockAuthConfig(SchemaCreate, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - - if t.expectedStatusCode == 200 { - s := schemas.Schema{} - json.Unmarshal(w.Body.Bytes(), &s) - t.expectedResponse = strings.Replace(t.expectedResponse, "{{UUID}}", s.UUID, 1) - } - - suite.Equal(t.expectedStatusCode, w.Code, t.msg) - suite.Equal(t.expectedResponse, w.Body.String(), t.msg) - } - -} - -func (suite *HandlerTestSuite) TestSchemaListOne() { - - type td struct { - expectedResponse string - schemaName string - expectedStatusCode int - msg string - } - - testData := []td{ - { - schemaName: "schema-1", - expectedStatusCode: 200, - expectedResponse: `{ - "uuid": "schema_uuid_1", - "name": "projects/ARGO/schemas/schema-1", - "type": "json", - "schema": { - "properties": { - "address": { - "type": "string" - }, - "email": { - "type": "string" - }, - "name": { - "type": "string" - }, - "telephone": { - "type": "string" - } - }, - "required": [ - "name", - "email" - ], - "type": "object" - } -}`, - msg: "Case where a specific schema is retrieved successfully", - }, - { - schemaName: "unknown", - expectedStatusCode: 404, - expectedResponse: `{ - "error": { - "code": 404, - "message": "Schema doesn't exist", - "status": "NOT_FOUND" - } -}`, - msg: "Case where the requested schema doesn't exist", - }, - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - - for _, t := range testData { - - w := 
httptest.NewRecorder() - url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/schemas/%v", t.schemaName) - req, err := http.NewRequest("GET", url, nil) - if err != nil { - log.Fatal(err) - } - router.HandleFunc("/v1/projects/{project}/schemas/{schema}", WrapMockAuthConfig(SchemaListOne, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - - suite.Equal(t.expectedStatusCode, w.Code, t.msg) - suite.Equal(t.expectedResponse, w.Body.String(), t.msg) - } -} - -func (suite *HandlerTestSuite) TestSchemaListAll() { - - type td struct { - expectedResponse string - projectName string - expectedStatusCode int - msg string - } - - testData := []td{ - { - projectName: "ARGO", - expectedStatusCode: 200, - expectedResponse: `{ - "schemas": [ - { - "uuid": "schema_uuid_1", - "name": "projects/ARGO/schemas/schema-1", - "type": "json", - "schema": { - "properties": { - "address": { - "type": "string" - }, - "email": { - "type": "string" - }, - "name": { - "type": "string" - }, - "telephone": { - "type": "string" - } - }, - "required": [ - "name", - "email" - ], - "type": "object" - } - }, - { - "uuid": "schema_uuid_2", - "name": "projects/ARGO/schemas/schema-2", - "type": "json", - "schema": { - "properties": { - "address": { - "type": "string" - }, - "email": { - "type": "string" - }, - "name": { - "type": "string" - }, - "telephone": { - "type": "string" - } - }, - "required": [ - "name", - "email" - ], - "type": "object" - } - }, - { - "uuid": "schema_uuid_3", - "name": "projects/ARGO/schemas/schema-3", - "type": "avro", - "schema": { - "fields": [ - { - "name": "username", - "type": "string" - }, - { - "name": "phone", - "type": "int" - } - ], - "name": "User", - "namespace": "user.avro", - "type": "record" - } - } - ] -}`, - msg: "Case where the schemas under a project are successfully retrieved", - }, - { - projectName: "ARGO2", - expectedStatusCode: 200, - expectedResponse: `{ - "schemas": [] -}`, - msg: "Case where the given project has no schemas", - }, - } - 
- cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - - for _, t := range testData { - - w := httptest.NewRecorder() - url := fmt.Sprintf("http://localhost:8080/v1/projects/%s/schemas", t.projectName) - req, err := http.NewRequest("GET", url, nil) - if err != nil { - log.Fatal(err) - } - router.HandleFunc("/v1/projects/{project}/schemas", WrapMockAuthConfig(SchemaListAll, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - - suite.Equal(t.expectedStatusCode, w.Code, t.msg) - suite.Equal(t.expectedResponse, w.Body.String(), t.msg) - } -} - -func (suite *HandlerTestSuite) TestSchemaUpdate() { - - type td struct { - postBody string - expectedResponse string - schemaName string - expectedStatusCode int - msg string - } - - testData := []td{ - { - schemaName: "schema-2", - postBody: `{"name": "projects/ARGO/schemas/schema-1"}`, - expectedStatusCode: 409, - expectedResponse: `{ - "error": { - "code": 409, - "message": "Schema already exists", - "status": "ALREADY_EXISTS" - } -}`, - msg: "Case where the requested schema wants to update the name field to an already existing one", - }, - { - schemaName: "schema-1", - postBody: `{"type":"unsupported"}`, - expectedStatusCode: 400, - expectedResponse: `{ - "error": { - "code": 400, - "message": "Schema type can only be 'json' or 'avro'", - "status": "INVALID_ARGUMENT" - } -}`, - msg: "Case where the requested schema wants to update its type field to an unsupported option", - }, - { - schemaName: "schema-1", - postBody: `{"schema":{"type":"unknown"}}`, - expectedStatusCode: 400, - expectedResponse: `{ - "error": { - "code": 400, - "message": "has a primitive type that is NOT VALID -- given: /unknown/ Expected valid values are:[array boolean integer 
number null object string]", - "status": "INVALID_ARGUMENT" - } -}`, - msg: "Case where the requested schema wants to update its schema with invalid contents", - }, - { - schemaName: "schema-1", - expectedStatusCode: 200, - expectedResponse: `{ - "uuid": "schema_uuid_1", - "name": "projects/ARGO/schemas/new-name", - "type": "json", - "schema": { - "properties": { - "address": { - "type": "string" - }, - "email": { - "type": "string" - }, - "name": { - "type": "string" - }, - "telephone": { - "type": "string" - } - }, - "required": [ - "name", - "email", - "address" - ], - "type": "object" - } -}`, - postBody: `{ - "name": "projects/ARGO/schemas/new-name", - "type": "json", - "schema": { - "properties": { - "address": { - "type": "string" - }, - "email": { - "type": "string" - }, - "name": { - "type": "string" - }, - "telephone": { - "type": "string" - } - }, - "required": [ - "name", - "email", - "address" - ], - "type": "object" - } -}`, - - msg: "Case where a specific schema has all its fields updated successfully", - }, - { - schemaName: "unknown", - postBody: "", - expectedStatusCode: 404, - expectedResponse: `{ - "error": { - "code": 404, - "message": "Schema doesn't exist", - "status": "NOT_FOUND" - } -}`, - msg: "Case where the requested schema doesn't exist", - }, - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - - for _, t := range testData { - - w := httptest.NewRecorder() - url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/schemas/%v", t.schemaName) - req, err := http.NewRequest("PUT", url, strings.NewReader(t.postBody)) - if err != nil { - log.Fatal(err) - } - router.HandleFunc("/v1/projects/{project}/schemas/{schema}", WrapMockAuthConfig(SchemaUpdate, cfgKafka, 
&brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - - suite.Equal(t.expectedStatusCode, w.Code, t.msg) - suite.Equal(t.expectedResponse, w.Body.String(), t.msg) - } -} - -func (suite *HandlerTestSuite) TestSchemaDelete() { - - type td struct { - expectedResponse string - schemaName string - expectedStatusCode int - msg string - } - - testData := []td{ - { - expectedResponse: "", - schemaName: "schema-1", - expectedStatusCode: 200, - msg: "Case where the schema is successfully deleted", - }, - { - schemaName: "unknown", - expectedStatusCode: 404, - expectedResponse: `{ - "error": { - "code": 404, - "message": "Schema doesn't exist", - "status": "NOT_FOUND" - } -}`, - msg: "Case where the requested schema doesn't exist", - }, - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - - for _, t := range testData { - - w := httptest.NewRecorder() - url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/schemas/%v", t.schemaName) - req, err := http.NewRequest("DELETE", url, nil) - if err != nil { - log.Fatal(err) - } - router.HandleFunc("/v1/projects/{project}/schemas/{schema}", WrapMockAuthConfig(SchemaDelete, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - - suite.Equal(t.expectedStatusCode, w.Code, t.msg) - suite.Equal(t.expectedResponse, w.Body.String(), t.msg) - } -} - -func (suite *HandlerTestSuite) TestSchemaValidateMessage() { - - type td struct { - expectedResponse string - postBody map[string]interface{} - schemaName string - expectedStatusCode int - msg string - } - - testData := []td{ - { - expectedResponse: `{ - "message": "Message validated successfully" -}`, - postBody: map[string]interface{}{ - "name": "name1", - "email": "email1", - }, - schemaName: "schema-1", - 
expectedStatusCode: 200, - msg: "Case where the message is successfully validated(JSON)", - }, - { - expectedResponse: `{ - "message": "Message validated successfully" -}`, - postBody: map[string]interface{}{ - "data": "DGFnZWxvc8T8Cg==", - }, - schemaName: "schema-3", - expectedStatusCode: 200, - msg: "Case where the message is successfully validated(AVRO)", - }, - { - postBody: map[string]interface{}{ - "name": "name1", - }, - schemaName: "schema-1", - expectedStatusCode: 400, - msg: "Case where the message is not valid(omit required email field)(JSON)", - expectedResponse: `{ - "error": { - "code": 400, - "message": "Message 0 data is not valid,(root): email is required", - "status": "INVALID_ARGUMENT" - } -}`, - }, - { - postBody: map[string]interface{}{ - "data": "T2JqAQQWYXZyby5zY2hlbWGYAnsidHlwZSI6InJlY29yZCIsIm5hbWUiOiJQbGFjZSIsIm5hbWVzcGFjZSI6InBsYWNlLmF2cm8iLCJmaWVsZHMiOlt7Im5hbWUiOiJwbGFjZW5hbWUiLCJ0eXBlIjoic3RyaW5nIn0seyJuYW1lIjoiYWRkcmVzcyIsInR5cGUiOiJzdHJpbmcifV19FGF2cm8uY29kZWMIbnVsbABM1P4b0GpYaCg9tqxa+YDZAiQSc3RyZWV0IDIyDnBsYWNlIGFM1P4b0GpYaCg9tqxa+YDZ", - }, - schemaName: "schema-3", - expectedStatusCode: 400, - msg: "Case where the message is not valid(AVRO)", - expectedResponse: `{ - "error": { - "code": 400, - "message": "Message 0 is not valid.cannot decode binary record \"user.avro.User\" field \"username\": cannot decode binary string: cannot decode binary bytes: negative size: -40", - "status": "INVALID_ARGUMENT" - } -}`, - }, - { - postBody: map[string]interface{}{ - "data": "DGFnZWxvc8T8Cg", - }, - schemaName: "schema-3", - expectedStatusCode: 400, - msg: "Case where the message is not in valid base64(AVRO)", - expectedResponse: `{ - "error": { - "code": 400, - "message": "Message 0 is not in valid base64 enocding,illegal base64 data at input byte 12", - "status": "INVALID_ARGUMENT" - } -}`, - }, - { - postBody: map[string]interface{}{ - "unknown": "unknown", - }, - schemaName: "schema-3", - expectedStatusCode: 400, - msg: "Case where the 
request arguments are missing the required data field(AVRO)", - expectedResponse: `{ - "error": { - "code": 400, - "message": "Invalid Schema Payload Arguments", - "status": "INVALID_ARGUMENT" - } -}`, - }, - { - schemaName: "unknown", - expectedStatusCode: 404, - expectedResponse: `{ - "error": { - "code": 404, - "message": "Schema doesn't exist", - "status": "NOT_FOUND" - } -}`, - msg: "Case where the schema doesn't exist", - }, - } - - cfgKafka := config.NewAPICfg() - cfgKafka.LoadStrJSON(suite.cfgStr) - cfgKafka.PushEnabled = true - cfgKafka.PushWorkerToken = "push_token" - brk := brokers.MockBroker{} - str := stores.NewMockStore("whatever", "argo_mgs") - router := mux.NewRouter().StrictSlash(true) - mgr := oldPush.Manager{} - pc := new(push.MockClient) - - for _, t := range testData { - - w := httptest.NewRecorder() - - url := fmt.Sprintf("http://localhost:8080/v1/projects/ARGO/schemas/%v:validate", t.schemaName) - - body, _ := json.MarshalIndent(t.postBody, "", "") - - req, err := http.NewRequest("POST", url, bytes.NewReader(body)) - if err != nil { - log.Fatal(err) - } - router.HandleFunc("/v1/projects/{project}/schemas/{schema}:validate", WrapMockAuthConfig(SchemaValidateMessage, cfgKafka, &brk, str, &mgr, pc)) - router.ServeHTTP(w, req) - - suite.Equal(t.expectedStatusCode, w.Code, t.msg) - suite.Equal(t.expectedResponse, w.Body.String(), t.msg) - } -} - -func TestHandlersTestSuite(t *testing.T) { - suite.Run(t, new(HandlerTestSuite)) -} diff --git a/metrics/metrics_test.go b/metrics/metrics_test.go index f54ea7a7..ea367348 100644 --- a/metrics/metrics_test.go +++ b/metrics/metrics_test.go @@ -5,6 +5,7 @@ import ( "strconv" "strings" "testing" + "time" "github.com/ARGOeu/argo-messaging/config" "github.com/ARGOeu/argo-messaging/stores" @@ -348,6 +349,71 @@ func (suite *MetricsTestSuite) TestAggrProjectUserTopicsTest() { } +func (suite *MetricsTestSuite) TestGetProjectsMessageCount() { + + store := stores.NewMockStore("", "") + store.Initialize() + + // test 
total message count per project + expectedTmpc := TotalProjectsMessageCount{ + Projects: []ProjectMessageCount{ + { + Project: "ARGO", + MessageCount: 60, + AverageDailyMessages: 15, + }, + }, + TotalCount: 60, + AverageDailyMessages: 15, + } + + tmpc, tmpcerr := GetProjectsMessageCount( + []string{"ARGO"}, + time.Date(2018, 10, 1, 0, 0, 0, 0, time.UTC), + time.Date(2018, 10, 4, 0, 0, 0, 0, time.UTC), + store, + ) + + suite.Equal(expectedTmpc, tmpc) + suite.Nil(tmpcerr) +} + +func (suite *MetricsTestSuite) TestGetVAReport() { + + store := stores.NewMockStore("", "") + store.Initialize() + + // test total message count per project + expectedTmpc := TotalProjectsMessageCount{ + Projects: []ProjectMessageCount{ + { + Project: "ARGO", + MessageCount: 280, + AverageDailyMessages: 0, + }, + }, + TotalCount: 280, + AverageDailyMessages: 0, + } + + va, tmpcerr := GetVAReport( + []string{"ARGO"}, + time.Date(2007, 10, 1, 0, 0, 0, 0, time.UTC), + time.Date(2020, 20, 4, 0, 0, 0, 0, time.UTC), + store, + ) + + expectedVA := VAReport{ + ProjectsMetrics: expectedTmpc, + UsersCount: 18, + TopicsCount: 8, + SubscriptionsCount: 8, + } + + suite.Equal(expectedVA, va) + suite.Nil(tmpcerr) +} + func TestMetricsTestSuite(t *testing.T) { suite.Run(t, new(MetricsTestSuite)) } diff --git a/metrics/models.go b/metrics/models.go index 3ae032b5..f9626745 100644 --- a/metrics/models.go +++ b/metrics/models.go @@ -53,6 +53,25 @@ type Metric struct { Description string `json:"description"` } +type ProjectMessageCount struct { + Project string `json:"project"` + MessageCount int64 `json:"message_count"` + AverageDailyMessages float64 `json:"average_daily_messages"` +} + +type TotalProjectsMessageCount struct { + Projects []ProjectMessageCount `json:"projects"` + TotalCount int64 `json:"total_message_count"` + AverageDailyMessages float64 `json:"average_daily_messages"` +} + +type VAReport struct { + ProjectsMetrics TotalProjectsMessageCount `json:"projects_metrics"` + UsersCount int 
`json:"users_count"` + TopicsCount int `json:"topics_count"` + SubscriptionsCount int `json:"subscriptions_count"` +} + type Timepoint struct { Timestamp string `json:"timestamp"` Value interface{} `json:"value"` diff --git a/metrics/queries.go b/metrics/queries.go index 355ba4ab..a8714175 100644 --- a/metrics/queries.go +++ b/metrics/queries.go @@ -1,7 +1,10 @@ package metrics import ( + "fmt" + amsProjects "github.com/ARGOeu/argo-messaging/projects" "github.com/ARGOeu/argo-messaging/stores" + "math" "time" ) @@ -106,3 +109,100 @@ func AggrProjectUserTopics(projectUUID string, store stores.Store) (MetricList, } return ml, err } + +// GetVAReport returns a VAReport populated with the needed metrics +func GetVAReport(projects []string, startDate time.Time, endDate time.Time, str stores.Store) (VAReport, error) { + + vaReport := VAReport{} + + tpm, err := GetProjectsMessageCount(projects, startDate, endDate, str) + if err != nil { + return vaReport, err + } + + // for the counters we need to include the ones created up to the end of the end date + // if some gives 2020-15-01 we need to get all counters up to 2020-15-01T23:59:59 + endDate = time.Date(endDate.Year(), endDate.Month(), endDate.Day(), 23, 59, 59, 0, endDate.Location()) + uc, err := str.UsersCount(startDate, endDate) + if err != nil { + return vaReport, err + } + + tc, err := str.TopicsCount(startDate, endDate) + if err != nil { + return vaReport, err + } + + sc, err := str.SubscriptionsCount(startDate, endDate) + if err != nil { + return vaReport, err + } + + vaReport.ProjectsMetrics = tpm + vaReport.UsersCount = uc + vaReport.TopicsCount = tc + vaReport.SubscriptionsCount = sc + + return vaReport, nil +} + +// GetProjectsMessageCount returns the total amount of messages per project for the given time window +func GetProjectsMessageCount(projects []string, startDate time.Time, endDate time.Time, str stores.Store) (TotalProjectsMessageCount, error) { + + tpj := TotalProjectsMessageCount{ + Projects: 
[]ProjectMessageCount{}, + TotalCount: 0, + } + + var qtpj []stores.QProjectMessageCount + var err error + + // since we want to present the end result using project names and not uuids + // we need to hold the mapping of UUID to NAME + projectsUUIDNames := make(map[string]string) + + // check that all project UUIDs are correct + // translate the project NAMES to their respective UUIDs + projectUUIDs := make([]string, 0) + for _, prj := range projects { + projectUUID := amsProjects.GetUUIDByName(prj, str) + if projectUUID == "" { + return TotalProjectsMessageCount{}, fmt.Errorf("Project %v", prj) + } + projectUUIDs = append(projectUUIDs, projectUUID) + projectsUUIDNames[projectUUID] = prj + } + + qtpj, err = str.QueryTotalMessagesPerProject(projectUUIDs, startDate, endDate) + if err != nil { + return TotalProjectsMessageCount{}, err + } + + for _, prj := range qtpj { + + projectName := "" + + // if no project names were provided we have to do the mapping between name and uuid + if len(projects) == 0 { + projectName = amsProjects.GetNameByUUID(prj.ProjectUUID, str) + } else { + projectName = projectsUUIDNames[prj.ProjectUUID] + } + + avg := math.Ceil(prj.AverageDailyMessages*100) / 100 + + pc := ProjectMessageCount{ + Project: projectName, + MessageCount: prj.NumberOfMessages, + AverageDailyMessages: avg, + } + + tpj.Projects = append(tpj.Projects, pc) + + tpj.TotalCount += prj.NumberOfMessages + + tpj.AverageDailyMessages += avg + } + + return tpj, nil +} diff --git a/projects/project.go b/projects/project.go index b6d173a6..78fb278a 100644 --- a/projects/project.go +++ b/projects/project.go @@ -6,9 +6,7 @@ import ( "time" - "fmt" "github.com/ARGOeu/argo-messaging/stores" - "math" ) // ProjectUUID is the struct that holds ProjectUUID information @@ -26,18 +24,6 @@ type Projects struct { List []Project `json:"projects,omitempty"` } -type ProjectMessageCount struct { - Project string `json:"project"` - MessageCount int64 `json:"message_count"` - AverageDailyMessages 
float64 `json:"average_daily_messages"` -} - -type TotalProjectsMessageCount struct { - Projects []ProjectMessageCount `json:"projects"` - TotalCount int64 `json:"total_message_count"` - AverageDailyMessages float64 `json:"average_daily_messages"` -} - // ExportJSON exports ProjectUUID to json format func (p *Project) ExportJSON() (string, error) { output, err := json.MarshalIndent(p, "", " ") @@ -102,67 +88,6 @@ func Find(uuid string, name string, store stores.Store) (Projects, error) { return result, err } -// GetProjectsMessageCount returns the total amount of messages per project for the given time window -func GetProjectsMessageCount(projects []string, startDate time.Time, endDate time.Time, str stores.Store) (TotalProjectsMessageCount, error) { - - tpj := TotalProjectsMessageCount{ - Projects: []ProjectMessageCount{}, - TotalCount: 0, - } - - var qtpj []stores.QProjectMessageCount - var err error - - // since we want to present the end result using project names and not uuids - // we need to hold the mapping of UUID to NAME - projectsUUIDNames := make(map[string]string) - - // check that all project UUIDs are correct - // translate the project NAMES to their respective UUIDs - projectUUIDs := make([]string, 0) - for _, prj := range projects { - projectUUID := GetUUIDByName(prj, str) - if projectUUID == "" { - return TotalProjectsMessageCount{}, fmt.Errorf("Project %v", prj) - } - projectUUIDs = append(projectUUIDs, projectUUID) - projectsUUIDNames[projectUUID] = prj - } - - qtpj, err = str.QueryTotalMessagesPerProject(projectUUIDs, startDate, endDate) - if err != nil { - return TotalProjectsMessageCount{}, err - } - - for _, prj := range qtpj { - - projectName := "" - - // if no project names were provided we have to do the mapping between name and uuid - if len(projects) == 0 { - projectName = GetNameByUUID(prj.ProjectUUID, str) - } else { - projectName = projectsUUIDNames[prj.ProjectUUID] - } - - avg := math.Ceil(prj.AverageDailyMessages*100) / 100 - - pc 
:= ProjectMessageCount{ - Project: projectName, - MessageCount: prj.NumberOfMessages, - AverageDailyMessages: avg, - } - - tpj.Projects = append(tpj.Projects, pc) - - tpj.TotalCount += prj.NumberOfMessages - - tpj.AverageDailyMessages += avg - } - - return tpj, nil -} - // GetNameByUUID queries projects by UUID and returns the project name. If not found, returns an empty string func GetNameByUUID(uuid string, store stores.Store) string { result := "" diff --git a/projects/project_test.go b/projects/project_test.go index dfbeaa0b..1f150deb 100644 --- a/projects/project_test.go +++ b/projects/project_test.go @@ -181,31 +181,6 @@ func (suite *ProjectsTestSuite) TestProjects() { suite.Equal(0, len(resSub)) } -func (suite *ProjectsTestSuite) TestGetProjectsMessageCount() { - - store := stores.NewMockStore("", "") - store.Initialize() - - // test total message count per project - expectedTmpc := TotalProjectsMessageCount{ - Projects: []ProjectMessageCount{ - {Project: "ARGO", MessageCount: 60, AverageDailyMessages: 20}, - }, - TotalCount: 60, - AverageDailyMessages: 20, - } - - tmpc, tmpcerr := GetProjectsMessageCount( - []string{"ARGO"}, - time.Date(2018, 10, 1, 0, 0, 0, 0, time.UTC), - time.Date(2018, 10, 4, 0, 0, 0, 0, time.UTC), - store, - ) - - suite.Equal(expectedTmpc, tmpc) - suite.Nil(tmpcerr) -} - func TestProjectsTestSuite(t *testing.T) { suite.Run(t, new(ProjectsTestSuite)) } diff --git a/push/grpc/client/client.go b/push/grpc/client/client.go index 30ea0f18..64bd8ac5 100644 --- a/push/grpc/client/client.go +++ b/push/grpc/client/client.go @@ -30,7 +30,7 @@ type GrpcClientStatus struct { } // Result prints the result of an grpc request -func (st *GrpcClientStatus) Result() string { +func (st *GrpcClientStatus) Result(details bool) string { grpcStatus := status.Convert(st.err) @@ -45,7 +45,11 @@ func (st *GrpcClientStatus) Result() string { "backend_service": "ams-push-server", }, ).Error(grpcStatus.Message()) - return "Push server is currently unavailable" + 
if details { + return grpcStatus.Message() + } else { + return "Push server is currently unavailable" + } } return fmt.Sprintf("Error: %v", grpcStatus.Message()) @@ -115,15 +119,16 @@ func (c *GrpcClient) SubscriptionStatus(ctx context.Context, fullSub string) Cli } // ActivateSubscription is a wrapper over the grpc ActivateSubscription call -func (c *GrpcClient) ActivateSubscription(ctx context.Context, fullSub, fullTopic, pushEndpoint, retryType string, retryPeriod uint32, maxMessages int64) ClientStatus { +func (c *GrpcClient) ActivateSubscription(ctx context.Context, fullSub, fullTopic, pushEndpoint, retryType string, retryPeriod uint32, maxMessages int64, authzHeader string) ClientStatus { actSubR := &amsPb.ActivateSubscriptionRequest{ Subscription: &amsPb.Subscription{ FullName: fullSub, FullTopic: fullTopic, PushConfig: &amsPb.PushConfig{ - PushEndpoint: pushEndpoint, - MaxMessages: maxMessages, + PushEndpoint: pushEndpoint, + MaxMessages: maxMessages, + AuthorizationHeader: authzHeader, RetryPolicy: &amsPb.RetryPolicy{ Type: retryType, Period: retryPeriod, diff --git a/push/grpc/client/client_test.go b/push/grpc/client/client_test.go index e7dcae50..1bb6b36d 100644 --- a/push/grpc/client/client_test.go +++ b/push/grpc/client/client_test.go @@ -18,7 +18,7 @@ func (suite *ClientTestSuite) TestResult() { err: nil, message: "ok message", } - suite.Equal("ok message", grpcStatus.Result()) + suite.Equal("ok message", grpcStatus.Result(false)) // error status grpcStatus2 := GrpcClientStatus{ @@ -26,7 +26,23 @@ func (suite *ClientTestSuite) TestResult() { message: "", } - suite.Equal("Error: invalid argument", grpcStatus2.Result()) + suite.Equal("Error: invalid argument", grpcStatus2.Result(false)) + + // unavailable error status + grpcStatus3 := GrpcClientStatus{ + err: status.Error(codes.Unavailable, "connection refused"), + message: "", + } + + suite.Equal("Push server is currently unavailable", grpcStatus3.Result(false)) + + // unavailable detailed status + 
grpcStatus4 := GrpcClientStatus{ + err: status.Error(codes.Unavailable, "connection refused"), + message: "", + } + + suite.Equal("connection refused", grpcStatus4.Result(true)) } func TestClientTestSuite(t *testing.T) { diff --git a/push/grpc/client/mock.go b/push/grpc/client/mock.go index 3fee2452..8543b7c3 100644 --- a/push/grpc/client/mock.go +++ b/push/grpc/client/mock.go @@ -26,7 +26,7 @@ func (*MockClient) Target() string { func (*MockClient) Dial() error { return nil } -func (*MockClient) ActivateSubscription(ctx context.Context, fullSub, fullTopic, pushEndpoint, retryType string, retryPeriod uint32, maxMessages int64) ClientStatus { +func (*MockClient) ActivateSubscription(ctx context.Context, fullSub, fullTopic, pushEndpoint, retryType string, retryPeriod uint32, maxMessages int64, authzHeader string) ClientStatus { switch fullSub { case "/projects/ARGO/subscriptions/subNew": @@ -77,6 +77,6 @@ type MockClientStatus struct { Status string } -func (m *MockClientStatus) Result() string { +func (m *MockClientStatus) Result(details bool) string { return m.Status } diff --git a/push/grpc/client/pushclient.go b/push/grpc/client/pushclient.go index 55fe26c7..60d013af 100644 --- a/push/grpc/client/pushclient.go +++ b/push/grpc/client/pushclient.go @@ -10,7 +10,7 @@ type Client interface { Dial() error // ActivateSubscription provides the push backend // with all the necessary information to start the push functionality for the respective subscription - ActivateSubscription(ctx context.Context, fullSub, fullTopic, pushEndpoint, retryType string, retryPeriod uint32, maxMessages int64) ClientStatus + ActivateSubscription(ctx context.Context, fullSub, fullTopic, pushEndpoint, retryType string, retryPeriod uint32, maxMessages int64, authzHeader string) ClientStatus // DeactivateSubscription asks the push backend to stop the push functionality for the respective subscription DeactivateSubscription(ctx context.Context, fullSub string) ClientStatus // SubscriptionStatus 
returns the current push status oif the given subscription @@ -26,5 +26,5 @@ type Client interface { // ClientStatus represents responses from a push backend type ClientStatus interface { // Result returns the string representation for the response from a push backend - Result() string + Result(details bool) string } diff --git a/push/grpc/proto/ams.pb.go b/push/grpc/proto/ams.pb.go index b682344e..b155c31a 100644 --- a/push/grpc/proto/ams.pb.go +++ b/push/grpc/proto/ams.pb.go @@ -398,10 +398,12 @@ type PushConfig struct { // Defaults to 1. How many messages should the push server consume and sent at once. MaxMessages int64 `protobuf:"varint,3,opt,name=max_messages,json=maxMessages,proto3" json:"max_messages,omitempty"` // Required. Retry policy. - RetryPolicy *RetryPolicy `protobuf:"bytes,2,opt,name=retry_policy,json=retryPolicy,proto3" json:"retry_policy,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + RetryPolicy *RetryPolicy `protobuf:"bytes,2,opt,name=retry_policy,json=retryPolicy,proto3" json:"retry_policy,omitempty"` + // Required. Authorization header that the sent messages should include into the request + AuthorizationHeader string `protobuf:"bytes,4,opt,name=authorization_header,json=authorizationHeader,proto3" json:"authorization_header,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *PushConfig) Reset() { *m = PushConfig{} } @@ -450,6 +452,13 @@ func (m *PushConfig) GetRetryPolicy() *RetryPolicy { return nil } +func (m *PushConfig) GetAuthorizationHeader() string { + if m != nil { + return m.AuthorizationHeader + } + return "" +} + // RetryPolicy holds information regarding the retry policy. type RetryPolicy struct { // Required. Type of the retry policy used (Only linear policy supported). 
@@ -517,35 +526,37 @@ func init() { func init() { proto.RegisterFile("ams.proto", fileDescriptor_85e4db6795b5b1aa) } var fileDescriptor_85e4db6795b5b1aa = []byte{ - // 444 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0xc1, 0x6e, 0xd4, 0x30, - 0x10, 0x4d, 0xda, 0x6a, 0x61, 0xc7, 0x49, 0x8b, 0x46, 0xa8, 0x0a, 0xd9, 0x6e, 0x29, 0xe6, 0x52, - 0x09, 0x64, 0xc4, 0xc2, 0xa1, 0x20, 0x2e, 0x15, 0x70, 0x04, 0x56, 0x59, 0x38, 0x71, 0x88, 0xdc, - 0xd4, 0x6d, 0x2d, 0x6d, 0x62, 0x13, 0x3b, 0xd5, 0xee, 0x07, 0xf0, 0x5d, 0xfc, 0x1a, 0x8a, 0xc9, - 0xb6, 0x09, 0xec, 0x46, 0xa8, 0x37, 0xcf, 0x9b, 0x79, 0x7e, 0x33, 0xf1, 0xbc, 0xc0, 0x90, 0xe7, - 0x86, 0xe9, 0x52, 0x59, 0x45, 0x4f, 0xe0, 0xd1, 0xac, 0x3a, 0x33, 0x59, 0x29, 0xb5, 0x95, 0xaa, - 0x98, 0x59, 0x6e, 0x2b, 0x93, 0x88, 0x1f, 0x95, 0x30, 0x16, 0x47, 0x30, 0xbc, 0xa8, 0xe6, 0xf3, - 0xb4, 0xe0, 0xb9, 0x88, 0xfc, 0x23, 0xff, 0x78, 0x98, 0xdc, 0xaf, 0x81, 0xcf, 0x3c, 0x17, 0xf4, - 0x35, 0xc4, 0xeb, 0x98, 0x46, 0xab, 0xc2, 0x08, 0xdc, 0x87, 0x81, 0x71, 0x48, 0xc3, 0x6b, 0x22, - 0xba, 0x07, 0x61, 0x47, 0x83, 0x3e, 0x80, 0xdd, 0x2e, 0x95, 0xbe, 0x85, 0xc3, 0x0f, 0x82, 0x67, - 0x56, 0x5e, 0x73, 0x2b, 0xda, 0x12, 0x37, 0x97, 0x47, 0x70, 0x2f, 0x17, 0xc6, 0xf0, 0xcb, 0x55, - 0x57, 0xab, 0x90, 0xbe, 0x83, 0xf1, 0x26, 0xee, 0x7f, 0x8c, 0x74, 0x02, 0x07, 0xa7, 0x77, 0xd3, - 0x9d, 0xc2, 0xe8, 0xb4, 0x47, 0xf5, 0x25, 0x04, 0xa6, 0x05, 0x3b, 0x36, 0x99, 0x84, 0xac, 0x53, - 0xdb, 0x29, 0xa1, 0x0b, 0x08, 0xda, 0xd9, 0xde, 0xc6, 0x71, 0x0c, 0xe0, 0x92, 0x56, 0x69, 0x99, - 0x45, 0x5b, 0x2e, 0xeb, 0xca, 0xbf, 0xd6, 0x00, 0x3e, 0x07, 0xa2, 0x2b, 0x73, 0x95, 0x66, 0xaa, - 0xb8, 0x90, 0x97, 0xd1, 0x8e, 0x53, 0x27, 0x6c, 0x5a, 0x99, 0xab, 0xf7, 0x0e, 0x4a, 0x40, 0xdf, - 0x9c, 0xe9, 0x4f, 0x1f, 0xe0, 0x36, 0x85, 0x4f, 0x21, 0x74, 0x64, 0x51, 0x9c, 0x6b, 0x25, 0x0b, - 0xdb, 0x88, 0x07, 0x35, 0xf8, 0xb1, 0xc1, 0xf0, 0x09, 0x04, 0x39, 0x5f, 0xa4, 0xcd, 0xe7, 0x30, - 0xd1, 0xf6, 0x91, 
0x7f, 0xbc, 0x9d, 0x90, 0x9c, 0x2f, 0x3e, 0x35, 0x10, 0xbe, 0x80, 0xa0, 0x14, - 0xb6, 0x5c, 0xa6, 0x5a, 0xcd, 0x65, 0xb6, 0x74, 0x5d, 0x92, 0x49, 0xc0, 0x92, 0x1a, 0x9c, 0x3a, - 0x2c, 0x21, 0xe5, 0x6d, 0x40, 0xdf, 0x00, 0x69, 0xe5, 0x10, 0x61, 0xc7, 0x2e, 0xf5, 0x6a, 0x76, - 0x77, 0xae, 0xb7, 0x4c, 0x8b, 0x52, 0xaa, 0x73, 0x77, 0x5b, 0x98, 0x34, 0xd1, 0xe4, 0xd7, 0x16, - 0x90, 0x7a, 0x84, 0x99, 0x28, 0xaf, 0x65, 0x26, 0xf0, 0x1b, 0x3c, 0x5c, 0xf7, 0x3c, 0x78, 0xc0, - 0x7a, 0x5e, 0x2d, 0x1e, 0xb3, 0xbe, 0x6d, 0xa0, 0x1e, 0x7e, 0x87, 0xfd, 0xf5, 0xdb, 0x86, 0x87, - 0xac, 0x77, 0x0d, 0xe3, 0xc7, 0xac, 0x7f, 0xc5, 0xa9, 0x87, 0xcf, 0x60, 0xf0, 0xc7, 0x18, 0xb8, - 0xcb, 0x3a, 0x96, 0x89, 0xf7, 0xd8, 0x5f, 0x8e, 0xf1, 0xf0, 0x0b, 0xe0, 0xbf, 0x66, 0xc4, 0x98, - 0x6d, 0xf4, 0x76, 0x3c, 0x62, 0x9b, 0xdd, 0x4b, 0xbd, 0xb3, 0x81, 0xfb, 0x3d, 0xbc, 0xfa, 0x1d, - 0x00, 0x00, 0xff, 0xff, 0x09, 0x3c, 0xc8, 0x85, 0x2b, 0x04, 0x00, 0x00, + // 472 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0x41, 0x6f, 0xd3, 0x30, + 0x14, 0x6e, 0xb7, 0xa9, 0xd0, 0x97, 0x74, 0x43, 0x8f, 0x69, 0x0a, 0xe9, 0x3a, 0x86, 0xb9, 0x4c, + 0x02, 0x19, 0xad, 0x70, 0x18, 0x88, 0xcb, 0x04, 0x48, 0x5c, 0x80, 0xca, 0x85, 0x13, 0x87, 0xc8, + 0x4b, 0xbd, 0xd5, 0x52, 0x13, 0x1b, 0xdb, 0x99, 0x5a, 0x7e, 0x19, 0x37, 0xfe, 0x1a, 0x8a, 0x49, + 0xb7, 0x06, 0xda, 0x08, 0x71, 0xf3, 0xfb, 0x9e, 0x3f, 0x7f, 0xcf, 0xcf, 0xef, 0x33, 0x74, 0x79, + 0x66, 0xa9, 0x36, 0xca, 0x29, 0x72, 0x06, 0x0f, 0xc6, 0xc5, 0x85, 0x4d, 0x8d, 0xd4, 0x4e, 0xaa, + 0x7c, 0xec, 0xb8, 0x2b, 0x2c, 0x13, 0xdf, 0x0a, 0x61, 0x1d, 0xf6, 0xa1, 0x7b, 0x59, 0xcc, 0x66, + 0x49, 0xce, 0x33, 0x11, 0xb5, 0x8f, 0xdb, 0x27, 0x5d, 0x76, 0xb7, 0x04, 0x3e, 0xf2, 0x4c, 0x90, + 0x17, 0x10, 0xaf, 0x63, 0x5a, 0xad, 0x72, 0x2b, 0xf0, 0x00, 0x3a, 0xd6, 0x23, 0x15, 0xaf, 0x8a, + 0xc8, 0x1e, 0xf4, 0x6a, 0x1a, 0xe4, 0x1e, 0xec, 0xd6, 0xa9, 0xe4, 0x15, 0x1c, 0xbd, 0x15, 0x3c, + 0x75, 0xf2, 0x9a, 0x3b, 0xb1, 
0x2a, 0x71, 0x73, 0x78, 0x04, 0x77, 0x32, 0x61, 0x2d, 0xbf, 0x5a, + 0x56, 0xb5, 0x0c, 0xc9, 0x6b, 0x18, 0x6c, 0xe2, 0xfe, 0xc3, 0x95, 0xce, 0xe0, 0xf0, 0xfc, 0xff, + 0x74, 0x47, 0xd0, 0x3f, 0x6f, 0x50, 0x3d, 0x85, 0xd0, 0xae, 0xc0, 0x9e, 0x1d, 0x0c, 0x7b, 0xb4, + 0xb6, 0xb7, 0xb6, 0x85, 0xcc, 0x21, 0x5c, 0xcd, 0x36, 0x16, 0x8e, 0x03, 0x00, 0x9f, 0x74, 0x4a, + 0xcb, 0x34, 0xda, 0xf2, 0x59, 0xbf, 0xfd, 0x73, 0x09, 0xe0, 0x53, 0x08, 0x74, 0x61, 0xa7, 0x49, + 0xaa, 0xf2, 0x4b, 0x79, 0x15, 0xed, 0x78, 0xf5, 0x80, 0x8e, 0x0a, 0x3b, 0x7d, 0xe3, 0x21, 0x06, + 0xfa, 0x66, 0x4d, 0x7e, 0xb4, 0x01, 0x6e, 0x53, 0xf8, 0x18, 0x7a, 0x9e, 0x2c, 0xf2, 0x89, 0x56, + 0x32, 0x77, 0x95, 0x78, 0x58, 0x82, 0xef, 0x2a, 0x0c, 0x1f, 0x41, 0x98, 0xf1, 0x79, 0x52, 0xb5, + 0xc3, 0x46, 0xdb, 0xc7, 0xed, 0x93, 0x6d, 0x16, 0x64, 0x7c, 0xfe, 0xa1, 0x82, 0xf0, 0x19, 0x84, + 0x46, 0x38, 0xb3, 0x48, 0xb4, 0x9a, 0xc9, 0x74, 0xe1, 0xab, 0x0c, 0x86, 0x21, 0x65, 0x25, 0x38, + 0xf2, 0x18, 0x0b, 0xcc, 0x6d, 0x80, 0xa7, 0xb0, 0xcf, 0x0b, 0x37, 0x55, 0x46, 0x7e, 0xe7, 0x65, + 0x0b, 0x92, 0xa9, 0xe0, 0x13, 0x61, 0x7c, 0xf9, 0x5d, 0x76, 0xbf, 0x96, 0x7b, 0xef, 0x53, 0xe4, + 0x25, 0x04, 0x2b, 0xc7, 0x21, 0xc2, 0x8e, 0x5b, 0xe8, 0x65, 0xbb, 0xfc, 0xba, 0x1c, 0x4c, 0x2d, + 0x8c, 0x54, 0x13, 0x5f, 0x40, 0x8f, 0x55, 0xd1, 0xf0, 0xe7, 0x16, 0x04, 0xe5, 0xad, 0xc7, 0xc2, + 0x5c, 0xcb, 0x54, 0xe0, 0x17, 0xd8, 0x5f, 0xf7, 0xa2, 0x78, 0x48, 0x1b, 0x1e, 0x3a, 0x1e, 0xd0, + 0xa6, 0x01, 0x22, 0x2d, 0xfc, 0x0a, 0x07, 0xeb, 0x07, 0x14, 0x8f, 0x68, 0xe3, 0xe4, 0xc6, 0x0f, + 0x69, 0xb3, 0x2b, 0x48, 0x0b, 0x9f, 0x40, 0xe7, 0xb7, 0x97, 0x70, 0x97, 0xd6, 0x5c, 0x16, 0xef, + 0xd1, 0x3f, 0x4c, 0xd6, 0xc2, 0x4f, 0x80, 0x7f, 0xfb, 0x17, 0x63, 0xba, 0xf1, 0x3b, 0x88, 0xfb, + 0x74, 0xb3, 0xe1, 0x49, 0xeb, 0xa2, 0xe3, 0x7f, 0x94, 0xe7, 0xbf, 0x02, 0x00, 0x00, 0xff, 0xff, + 0x9c, 0x92, 0xc0, 0xda, 0x5e, 0x04, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
diff --git a/push/grpc/proto/ams.proto b/push/grpc/proto/ams.proto index 16178e8a..7849a7d9 100644 --- a/push/grpc/proto/ams.proto +++ b/push/grpc/proto/ams.proto @@ -75,6 +75,8 @@ message PushConfig { int64 max_messages = 3; // Required. Retry policy. RetryPolicy retry_policy = 2; + // Required. Authorization header that the sent messages should include into the request + string authorization_header = 4; } // RetryPolicy holds information regarding the retry policy. diff --git a/routing.go b/routing.go index aa7338e6..7b66b024 100644 --- a/routing.go +++ b/routing.go @@ -7,6 +7,7 @@ import ( "github.com/ARGOeu/argo-messaging/brokers" "github.com/ARGOeu/argo-messaging/config" + "github.com/ARGOeu/argo-messaging/handlers" oldPush "github.com/ARGOeu/argo-messaging/push" push "github.com/ARGOeu/argo-messaging/push/grpc/client" "github.com/ARGOeu/argo-messaging/stores" @@ -37,6 +38,8 @@ func NewRouting(cfg *config.APICfg, brk brokers.Broker, str stores.Store, mgr *o // reference routes input in API object too keep info centralized ar.Routes = routes + tokenExtractStrategy := handlers.GetRequestTokenExtractStrategy(cfg.AuthOption()) + // For each route for _, route := range ar.Routes { @@ -44,16 +47,16 @@ func NewRouting(cfg *config.APICfg, brk brokers.Broker, str stores.Store, mgr *o var handler http.HandlerFunc handler = route.Handler - handler = WrapLog(handler, route.Name) + handler = handlers.WrapLog(handler, route.Name) // skip authentication/authorization for the health status and profile api calls if route.Name != "ams:healthStatus" && "users:profile" != route.Name && route.Name != "version:list" { - handler = WrapAuthorize(handler, route.Name) - handler = WrapAuthenticate(handler) + handler = handlers.WrapAuthorize(handler, route.Name, tokenExtractStrategy) + handler = handlers.WrapAuthenticate(handler, tokenExtractStrategy) } - handler = WrapValidate(handler) - handler = WrapConfig(handler, cfg, brk, str, mgr, c) + handler = handlers.WrapValidate(handler) + 
handler = handlers.WrapConfig(handler, cfg, brk, str, mgr, c) ar.Router. PathPrefix("/v1"). @@ -71,64 +74,64 @@ func NewRouting(cfg *config.APICfg, brk brokers.Broker, str stores.Store, mgr *o // Global list populated with default routes var defaultRoutes = []APIRoute{ - {"ams:metrics", "GET", "/metrics", OpMetrics}, - {"ams:healthStatus", "GET", "/status", HealthCheck}, - {"ams:dailyMessageAverage", "GET", "/metrics/daily-message-average", DailyMessageAverage}, - {"users:byToken", "GET", "/users:byToken/{token}", UserListByToken}, - {"users:byUUID", "GET", "/users:byUUID/{uuid}", UserListByUUID}, - {"users:list", "GET", "/users", UserListAll}, - {"users:profile", "GET", "/users/profile", UserProfile}, - {"users:show", "GET", "/users/{user}", UserListOne}, - {"users:refreshToken", "POST", "/users/{user}:refreshToken", RefreshToken}, - {"users:create", "POST", "/users/{user}", UserCreate}, - {"users:update", "PUT", "/users/{user}", UserUpdate}, - {"users:delete", "DELETE", "/users/{user}", UserDelete}, - {"registrations:newUser", "POST", "/registrations", RegisterUser}, - {"registrations:acceptNewUser", "POST", "/registrations/{uuid}:accept", AcceptRegisterUser}, - {"registrations:declineNewUser", "POST", "/registrations/{uuid}:decline", DeclineRegisterUser}, - {"registrations:show", "GET", "/registrations/{uuid}", ListOneRegistration}, - {"registrations:list", "GET", "/registrations", ListAllRegistrations}, - {"projects:list", "GET", "/projects", ProjectListAll}, - {"projects:metrics", "GET", "/projects/{project}:metrics", ProjectMetrics}, - {"projects:addUser", "POST", "/projects/{project}/members/{user}:add", ProjectUserAdd}, - {"projects:removeUser", "POST", "/projects/{project}/members/{user}:remove", ProjectUserRemove}, - {"projects:showUser", "GET", "/projects/{project}/members/{user}", ProjectUserListOne}, - {"projects:createUser", "POST", "/projects/{project}/members/{user}", ProjectUserCreate}, - {"projects:updateUser", "PUT", 
"/projects/{project}/members/{user}", ProjectUserUpdate}, - {"projects:listUsers", "GET", "/projects/{project}/members", ProjectListUsers}, - {"projects:show", "GET", "/projects/{project}", ProjectListOne}, - {"projects:create", "POST", "/projects/{project}", ProjectCreate}, - {"projects:update", "PUT", "/projects/{project}", ProjectUpdate}, - {"projects:delete", "DELETE", "/projects/{project}", ProjectDelete}, - {"subscriptions:list", "GET", "/projects/{project}/subscriptions", SubListAll}, - {"subscriptions:listByTopic", "GET", "/projects/{project}/topics/{topic}/subscriptions", ListSubsByTopic}, - {"subscriptions:offsets", "GET", "/projects/{project}/subscriptions/{subscription}:offsets", SubGetOffsets}, - {"subscriptions:timeToOffset", "GET", "/projects/{project}/subscriptions/{subscription}:timeToOffset", SubTimeToOffset}, - {"subscriptions:acl", "GET", "/projects/{project}/subscriptions/{subscription}:acl", SubACL}, - {"subscriptions:metrics", "GET", "/projects/{project}/subscriptions/{subscription}:metrics", SubMetrics}, - {"subscriptions:show", "GET", "/projects/{project}/subscriptions/{subscription}", SubListOne}, - {"subscriptions:create", "PUT", "/projects/{project}/subscriptions/{subscription}", SubCreate}, - {"subscriptions:delete", "DELETE", "/projects/{project}/subscriptions/{subscription}", SubDelete}, - {"subscriptions:pull", "POST", "/projects/{project}/subscriptions/{subscription}:pull", SubPull}, - {"subscriptions:acknowledge", "POST", "/projects/{project}/subscriptions/{subscription}:acknowledge", SubAck}, - {"subscriptions:verifyPushEndpoint", "POST", "/projects/{project}/subscriptions/{subscription}:verifyPushEndpoint", SubVerifyPushEndpoint}, - {"subscriptions:modifyAckDeadline", "POST", "/projects/{project}/subscriptions/{subscription}:modifyAckDeadline", SubModAck}, - {"subscriptions:modifyPushConfig", "POST", "/projects/{project}/subscriptions/{subscription}:modifyPushConfig", SubModPush}, - {"subscriptions:modifyOffset", "POST", 
"/projects/{project}/subscriptions/{subscription}:modifyOffset", SubSetOffset}, - {"subscriptions:modifyAcl", "POST", "/projects/{project}/subscriptions/{subscription}:modifyAcl", SubModACL}, - {"topics:list", "GET", "/projects/{project}/topics", TopicListAll}, - {"topics:acl", "GET", "/projects/{project}/topics/{topic}:acl", TopicACL}, - {"topics:metrics", "GET", "/projects/{project}/topics/{topic}:metrics", TopicMetrics}, - {"topics:show", "GET", "/projects/{project}/topics/{topic}", TopicListOne}, - {"topics:create", "PUT", "/projects/{project}/topics/{topic}", TopicCreate}, - {"topics:delete", "DELETE", "/projects/{project}/topics/{topic}", TopicDelete}, - {"topics:publish", "POST", "/projects/{project}/topics/{topic}:publish", TopicPublish}, - {"topics:modifyAcl", "POST", "/projects/{project}/topics/{topic}:modifyAcl", TopicModACL}, - {"schemas:validateMessage", "POST", "/projects/{project}/schemas/{schema}:validate", SchemaValidateMessage}, - {"schemas:create", "POST", "/projects/{project}/schemas/{schema}", SchemaCreate}, - {"schemas:show", "GET", "/projects/{project}/schemas/{schema}", SchemaListOne}, - {"schemas:list", "GET", "/projects/{project}/schemas", SchemaListAll}, - {"schemas:update", "PUT", "/projects/{project}/schemas/{schema}", SchemaUpdate}, - {"schemas:delete", "DELETE", "/projects/{project}/schemas/{schema}", SchemaDelete}, - {"version:list", "GET", "/version", ListVersion}, + {"ams:metrics", "GET", "/metrics", handlers.OpMetrics}, + {"ams:healthStatus", "GET", "/status", handlers.HealthCheck}, + {"ams:vaMetrics", "GET", "/metrics/va_metrics", handlers.VaMetrics}, + {"users:byToken", "GET", "/users:byToken/{token}", handlers.UserListByToken}, + {"users:byUUID", "GET", "/users:byUUID/{uuid}", handlers.UserListByUUID}, + {"users:list", "GET", "/users", handlers.UserListAll}, + {"users:profile", "GET", "/users/profile", handlers.UserProfile}, + {"users:show", "GET", "/users/{user}", handlers.UserListOne}, + {"users:refreshToken", "POST", 
"/users/{user}:refreshToken", handlers.RefreshToken}, + {"users:create", "POST", "/users/{user}", handlers.UserCreate}, + {"users:update", "PUT", "/users/{user}", handlers.UserUpdate}, + {"users:delete", "DELETE", "/users/{user}", handlers.UserDelete}, + {"registrations:newUser", "POST", "/registrations", handlers.RegisterUser}, + {"registrations:acceptNewUser", "POST", "/registrations/{uuid}:accept", handlers.AcceptRegisterUser}, + {"registrations:declineNewUser", "POST", "/registrations/{uuid}:decline", handlers.DeclineRegisterUser}, + {"registrations:show", "GET", "/registrations/{uuid}", handlers.ListOneRegistration}, + {"registrations:list", "GET", "/registrations", handlers.ListAllRegistrations}, + {"projects:list", "GET", "/projects", handlers.ProjectListAll}, + {"projects:metrics", "GET", "/projects/{project}:metrics", handlers.ProjectMetrics}, + {"projects:addUser", "POST", "/projects/{project}/members/{user}:add", handlers.ProjectUserAdd}, + {"projects:removeUser", "POST", "/projects/{project}/members/{user}:remove", handlers.ProjectUserRemove}, + {"projects:showUser", "GET", "/projects/{project}/members/{user}", handlers.ProjectUserListOne}, + {"projects:createUser", "POST", "/projects/{project}/members/{user}", handlers.ProjectUserCreate}, + {"projects:updateUser", "PUT", "/projects/{project}/members/{user}", handlers.ProjectUserUpdate}, + {"projects:listUsers", "GET", "/projects/{project}/members", handlers.ProjectListUsers}, + {"projects:show", "GET", "/projects/{project}", handlers.ProjectListOne}, + {"projects:create", "POST", "/projects/{project}", handlers.ProjectCreate}, + {"projects:update", "PUT", "/projects/{project}", handlers.ProjectUpdate}, + {"projects:delete", "DELETE", "/projects/{project}", handlers.ProjectDelete}, + {"subscriptions:list", "GET", "/projects/{project}/subscriptions", handlers.SubListAll}, + {"subscriptions:listByTopic", "GET", "/projects/{project}/topics/{topic}/subscriptions", handlers.ListSubsByTopic}, + 
{"subscriptions:offsets", "GET", "/projects/{project}/subscriptions/{subscription}:offsets", handlers.SubGetOffsets}, + {"subscriptions:timeToOffset", "GET", "/projects/{project}/subscriptions/{subscription}:timeToOffset", handlers.SubTimeToOffset}, + {"subscriptions:acl", "GET", "/projects/{project}/subscriptions/{subscription}:acl", handlers.SubACL}, + {"subscriptions:metrics", "GET", "/projects/{project}/subscriptions/{subscription}:metrics", handlers.SubMetrics}, + {"subscriptions:show", "GET", "/projects/{project}/subscriptions/{subscription}", handlers.SubListOne}, + {"subscriptions:create", "PUT", "/projects/{project}/subscriptions/{subscription}", handlers.SubCreate}, + {"subscriptions:delete", "DELETE", "/projects/{project}/subscriptions/{subscription}", handlers.SubDelete}, + {"subscriptions:pull", "POST", "/projects/{project}/subscriptions/{subscription}:pull", handlers.SubPull}, + {"subscriptions:acknowledge", "POST", "/projects/{project}/subscriptions/{subscription}:acknowledge", handlers.SubAck}, + {"subscriptions:verifyPushEndpoint", "POST", "/projects/{project}/subscriptions/{subscription}:verifyPushEndpoint", handlers.SubVerifyPushEndpoint}, + {"subscriptions:modifyAckDeadline", "POST", "/projects/{project}/subscriptions/{subscription}:modifyAckDeadline", handlers.SubModAck}, + {"subscriptions:modifyPushConfig", "POST", "/projects/{project}/subscriptions/{subscription}:modifyPushConfig", handlers.SubModPush}, + {"subscriptions:modifyOffset", "POST", "/projects/{project}/subscriptions/{subscription}:modifyOffset", handlers.SubSetOffset}, + {"subscriptions:modifyAcl", "POST", "/projects/{project}/subscriptions/{subscription}:modifyAcl", handlers.SubModACL}, + {"topics:list", "GET", "/projects/{project}/topics", handlers.TopicListAll}, + {"topics:acl", "GET", "/projects/{project}/topics/{topic}:acl", handlers.TopicACL}, + {"topics:metrics", "GET", "/projects/{project}/topics/{topic}:metrics", handlers.TopicMetrics}, + {"topics:show", "GET", 
"/projects/{project}/topics/{topic}", handlers.TopicListOne}, + {"topics:create", "PUT", "/projects/{project}/topics/{topic}", handlers.TopicCreate}, + {"topics:delete", "DELETE", "/projects/{project}/topics/{topic}", handlers.TopicDelete}, + {"topics:publish", "POST", "/projects/{project}/topics/{topic}:publish", handlers.TopicPublish}, + {"topics:modifyAcl", "POST", "/projects/{project}/topics/{topic}:modifyAcl", handlers.TopicModACL}, + {"schemas:validateMessage", "POST", "/projects/{project}/schemas/{schema}:validate", handlers.SchemaValidateMessage}, + {"schemas:create", "POST", "/projects/{project}/schemas/{schema}", handlers.SchemaCreate}, + {"schemas:show", "GET", "/projects/{project}/schemas/{schema}", handlers.SchemaListOne}, + {"schemas:list", "GET", "/projects/{project}/schemas", handlers.SchemaListAll}, + {"schemas:update", "PUT", "/projects/{project}/schemas/{schema}", handlers.SchemaUpdate}, + {"schemas:delete", "DELETE", "/projects/{project}/schemas/{schema}", handlers.SchemaDelete}, + {"version:list", "GET", "/version", handlers.ListVersion}, } diff --git a/stores/mock.go b/stores/mock.go index 0cb7ccd5..bc2e215e 100644 --- a/stores/mock.go +++ b/stores/mock.go @@ -25,6 +25,44 @@ type MockStore struct { OpMetrics map[string]QopMetric } +func (mk *MockStore) TopicsCount(startDate, endDate time.Time) (int, error) { + + counter := 0 + + for _, sub := range mk.SubList { + if sub.CreatedOn.After(startDate) && sub.CreatedOn.Before(endDate) { + counter++ + } + } + + return counter, nil +} + +func (mk *MockStore) SubscriptionsCount(startDate, endDate time.Time) (int, error) { + + counter := 0 + for _, t := range mk.TopicList { + if t.CreatedOn.After(startDate) && t.CreatedOn.Before(endDate) { + counter++ + } + } + + return counter, nil +} + +func (mk *MockStore) UsersCount(startDate, endDate time.Time) (int, error) { + + counter := 0 + + for _, u := range mk.UserList { + if u.CreatedOn.After(startDate) && u.CreatedOn.Before(endDate) { + counter++ + } + } + 
+ return counter, nil +} + // QueryACL Topic/Subscription ACL func (mk *MockStore) QueryACL(projectUUID string, resource string, name string) (QAcl, error) { if resource == "topics" { @@ -549,10 +587,12 @@ func (mk *MockStore) ModAck(projectUUID string, name string, ack int) error { } // ModSubPush modifies the subscription push configuration -func (mk *MockStore) ModSubPush(projectUUID string, name string, push string, maxMessages int64, rPolicy string, rPeriod int, vhash string, verified bool) error { +func (mk *MockStore) ModSubPush(projectUUID string, name string, push string, authzType string, authzValue string, maxMessages int64, rPolicy string, rPeriod int, vhash string, verified bool) error { for i, item := range mk.SubList { if item.ProjectUUID == projectUUID && item.Name == name { mk.SubList[i].PushEndpoint = push + mk.SubList[i].AuthorizationType = authzType + mk.SubList[i].AuthorizationHeader = authzValue mk.SubList[i].MaxMessages = maxMessages mk.SubList[i].RetPolicy = rPolicy mk.SubList[i].RetPeriod = rPeriod @@ -670,12 +710,13 @@ func (mk *MockStore) QueryUsers(projectUUID string, uuid string, name string) ([ func (mk *MockStore) PaginatedQueryUsers(pageToken string, pageSize int32, projectUUID string) ([]QUser, int32, string, error) { var qUsers []QUser - var totalSize int32 var nextPageToken string var err error var pg int var limit int + totalSize := int32(0) + if pageSize == 0 { limit = len(mk.UserList) } else { @@ -694,8 +735,6 @@ func (mk *MockStore) PaginatedQueryUsers(pageToken string, pageSize int32, proje return id1 > id2 }) - totalSize = int32(len(mk.UserList)) - for _, user := range mk.UserList { if projectUUID != "" { @@ -739,8 +778,16 @@ func (mk *MockStore) PaginatedQueryUsers(pageToken string, pageSize int32, proje qUsers = qUsers[:len(qUsers)-1] } - if projectUUID != "" { + if projectUUID == "" { totalSize = int32(len(qUsers)) + } else { + for _, user := range mk.UserList { + for _, project := range user.Projects { + if projectUUID 
== project.ProjectUUID { + totalSize++ + } + } + } } return qUsers, totalSize, nextPageToken, err @@ -765,20 +812,20 @@ func (mk *MockStore) Initialize() { mk.OpMetrics = make(map[string]QopMetric) // populate topics - qtop4 := QTopic{3, "argo_uuid", "topic4", 0, 0, time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0, ""} - qtop3 := QTopic{2, "argo_uuid", "topic3", 0, 0, time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99, "schema_uuid_3"} - qtop2 := QTopic{1, "argo_uuid", "topic2", 0, 0, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, "schema_uuid_1"} - qtop1 := QTopic{0, "argo_uuid", "topic1", 0, 0, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, ""} + qtop4 := QTopic{3, "argo_uuid", "topic4", 0, 0, time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0, "", time.Date(2020, 11, 19, 0, 0, 0, 0, time.Local), []string{}} + qtop3 := QTopic{2, "argo_uuid", "topic3", 0, 0, time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99, "schema_uuid_3", time.Date(2020, 11, 20, 0, 0, 0, 0, time.Local), []string{}} + qtop2 := QTopic{1, "argo_uuid", "topic2", 0, 0, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, "schema_uuid_1", time.Date(2020, 11, 21, 0, 0, 0, 0, time.Local), []string{}} + qtop1 := QTopic{0, "argo_uuid", "topic1", 0, 0, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, "", time.Date(2020, 11, 22, 0, 0, 0, 0, time.Local), []string{}} mk.TopicList = append(mk.TopicList, qtop1) mk.TopicList = append(mk.TopicList, qtop2) mk.TopicList = append(mk.TopicList, qtop3) mk.TopicList = append(mk.TopicList, qtop4) // populate Subscriptions - qsub1 := QSub{0, "argo_uuid", "sub1", "topic1", 0, 0, "", "", 0, 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10} - qsub2 := QSub{1, "argo_uuid", "sub2", "topic2", 0, 0, "", "", 0, 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99} - qsub3 := QSub{2, "argo_uuid", "sub3", "topic3", 0, 0, "", "", 0, 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45} - 
qsub4 := QSub{3, "argo_uuid", "sub4", "topic4", 0, 0, "", "endpoint.foo", 1, 10, "linear", 300, 0, 0, "push-id-1", true, time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0} + qsub1 := QSub{0, "argo_uuid", "sub1", "topic1", 0, 0, "", "", 0, "", "", 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, time.Date(2020, 11, 19, 0, 0, 0, 0, time.Local), []string{}} + qsub2 := QSub{1, "argo_uuid", "sub2", "topic2", 0, 0, "", "", 0, "", "", 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99, time.Date(2020, 11, 20, 0, 0, 0, 0, time.Local), []string{}} + qsub3 := QSub{2, "argo_uuid", "sub3", "topic3", 0, 0, "", "", 0, "", "", 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, time.Date(2020, 11, 21, 0, 0, 0, 0, time.Local), []string{}} + qsub4 := QSub{3, "argo_uuid", "sub4", "topic4", 0, 0, "", "endpoint.foo", 1, "autogen", "auth-header-1", 10, "linear", 300, 0, 0, "push-id-1", true, time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0, time.Date(2020, 11, 22, 0, 0, 0, 0, time.Local), []string{}} mk.SubList = append(mk.SubList, qsub1) mk.SubList = append(mk.SubList, qsub2) mk.SubList = append(mk.SubList, qsub3) @@ -904,6 +951,9 @@ func (mk *MockStore) QueryTotalMessagesPerProject(projectUUIDs []string, startDa days := int64(1) if !endDate.Equal(startDate) { days = int64(endDate.Sub(startDate).Hours() / 24) + // add an extra day to compensate for the fact that we need the starting day included as well + // e.g. 
Aug 1 to Aug 31 should be calculated as 31 days and not as 30 + days += 1 } if len(projectUUIDs) == 0 { @@ -1018,7 +1068,7 @@ func (mk *MockStore) HasProject(name string) bool { } // InsertTopic inserts a new topic object to the store -func (mk *MockStore) InsertTopic(projectUUID string, name string, schemaUUID string) error { +func (mk *MockStore) InsertTopic(projectUUID string, name string, schemaUUID string, createdOn time.Time) error { topic := QTopic{ ID: len(mk.TopicList), ProjectUUID: projectUUID, @@ -1028,31 +1078,38 @@ func (mk *MockStore) InsertTopic(projectUUID string, name string, schemaUUID str LatestPublish: time.Time{}, PublishRate: 0, SchemaUUID: schemaUUID, + CreatedOn: createdOn, + ACL: []string{}, } mk.TopicList = append(mk.TopicList, topic) return nil } // InsertSub inserts a new sub object to the store -func (mk *MockStore) InsertSub(projectUUID string, name string, topic string, offset int64, maxMessages int64, ack int, push string, rPolicy string, rPeriod int, vhash string, verified bool) error { +func (mk *MockStore) InsertSub(projectUUID string, name string, topic string, offset int64, maxMessages int64, authT string, authH string, ack int, push string, rPolicy string, rPeriod int, vhash string, verified bool, createdOn time.Time) error { sub := QSub{ - ID: len(mk.SubList), - ProjectUUID: projectUUID, - Name: name, - Topic: topic, - Offset: offset, - Ack: ack, - MaxMessages: maxMessages, - PushEndpoint: push, - RetPolicy: rPolicy, - RetPeriod: rPeriod, - VerificationHash: vhash, - Verified: verified, - MsgNum: 0, - TotalBytes: 0, - LatestConsume: time.Time{}, - ConsumeRate: 0, + ID: len(mk.SubList), + ProjectUUID: projectUUID, + Name: name, + Topic: topic, + Offset: offset, + Ack: ack, + MaxMessages: maxMessages, + AuthorizationType: authT, + AuthorizationHeader: authH, + PushEndpoint: push, + RetPolicy: rPolicy, + RetPeriod: rPeriod, + VerificationHash: vhash, + Verified: verified, + MsgNum: 0, + TotalBytes: 0, + LatestConsume: 
time.Time{}, + ConsumeRate: 0, + CreatedOn: createdOn, + ACL: []string{}, } + mk.SubList = append(mk.SubList, sub) mk.SubsACL[name] = QAcl{} return nil diff --git a/stores/mongo.go b/stores/mongo.go index 28d9fd4b..f2915456 100644 --- a/stores/mongo.go +++ b/stores/mongo.go @@ -78,6 +78,49 @@ func (mong *MongoStore) Initialize() { } } +// SubscriptionsCount returns the amount of subscriptions created in the given time period +func (mong *MongoStore) SubscriptionsCount(startDate, endDate time.Time) (int, error) { + return mong.getDocCountForCollection(startDate, endDate, "subscriptions") +} + +// TopicsCount returns the amount of topics created in the given time period +func (mong *MongoStore) TopicsCount(startDate, endDate time.Time) (int, error) { + return mong.getDocCountForCollection(startDate, endDate, "topics") +} + +// UserCount returns the amount of users created in the given time period +func (mong *MongoStore) UsersCount(startDate, endDate time.Time) (int, error) { + return mong.getDocCountForCollection(startDate, endDate, "users") +} + +// getDocCountForCollection returns the document count for a collection in a given time period +// collection should support field created_on +func (mong *MongoStore) getDocCountForCollection(startDate, endDate time.Time, col string) (int, error) { + + query := bson.M{ + "created_on": bson.M{ + "$gte": startDate, + "$lte": endDate, + }, + } + + db := mong.Session.DB(mong.Database) + c := db.C(col) + + count, err := c.Find(query).Count() + if err != nil { + log.WithFields( + log.Fields{ + "type": "backend_log", + "backend_service": "mongo", + "backend_hosts": mong.Server, + }, + ).Fatal(err.Error()) + } + + return count, nil +} + // QueryProjects queries the database for a specific project or a list of all projects func (mong *MongoStore) QueryProjects(uuid string, name string) ([]QProject, error) { @@ -565,10 +608,15 @@ func (mong *MongoStore) PaginatedQueryUsers(pageToken string, pageSize int32, pr db := 
mong.Session.DB(mong.Database) c := db.C("users") - // check the total of the users selected by the query not taking into acount pagination + // check the total of the users selected by the query not taking into account pagination if size, err = c.Find(query).Count(); err != nil { - log.Fatal("STORE", "\t", err.Error()) - + log.WithFields( + log.Fields{ + "type": "backend_log", + "backend_service": "mongo", + "backend_hosts": mong.Server, + }, + ).Fatal(err.Error()) } totalSize = int32(size) @@ -632,20 +680,7 @@ func (mong *MongoStore) PaginatedQueryUsers(pageToken string, pageSize int32, pr qUsers = qUsers[:len(qUsers)-1] } - if size, err = c.Count(); err != nil { - log.WithFields( - log.Fields{ - "type": "backend_log", - "backend_service": "mongo", - "backend_hosts": mong.Server, - }, - ).Fatal(err.Error()) - } - - totalSize = int32(size) - return qUsers, totalSize, nextPageToken, err - } //QuerySubsByTopic returns subscriptions of a specific topic @@ -1208,7 +1243,7 @@ func (mong *MongoStore) HasProject(name string) bool { } // InsertTopic inserts a topic to the store -func (mong *MongoStore) InsertTopic(projectUUID string, name string, schemaUUID string) error { +func (mong *MongoStore) InsertTopic(projectUUID string, name string, schemaUUID string, createdOn time.Time) error { topic := QTopic{ ProjectUUID: projectUUID, @@ -1218,6 +1253,8 @@ func (mong *MongoStore) InsertTopic(projectUUID string, name string, schemaUUID LatestPublish: time.Time{}, PublishRate: 0, SchemaUUID: schemaUUID, + CreatedOn: createdOn, + ACL: []string{}, } return mong.InsertResource("topics", topic) @@ -1272,23 +1309,27 @@ func (mong *MongoStore) InsertProject(uuid string, name string, createdOn time.T } // InsertSub inserts a subscription to the store -func (mong *MongoStore) InsertSub(projectUUID string, name string, topic string, offset int64, maxMessages int64, ack int, push string, rPolicy string, rPeriod int, vhash string, verified bool) error { +func (mong *MongoStore) 
InsertSub(projectUUID string, name string, topic string, offset int64, maxMessages int64, authzType string, authzHeader string, ack int, push string, rPolicy string, rPeriod int, vhash string, verified bool, createdOn time.Time) error { sub := QSub{ - ProjectUUID: projectUUID, - Name: name, - Topic: topic, - Offset: offset, - NextOffset: 0, - PendingAck: "", - Ack: ack, - MaxMessages: maxMessages, - PushEndpoint: push, - RetPolicy: rPolicy, - RetPeriod: rPeriod, - VerificationHash: vhash, - Verified: verified, - MsgNum: 0, - TotalBytes: 0, + ProjectUUID: projectUUID, + Name: name, + Topic: topic, + Offset: offset, + NextOffset: 0, + PendingAck: "", + Ack: ack, + MaxMessages: maxMessages, + AuthorizationType: authzType, + AuthorizationHeader: authzHeader, + PushEndpoint: push, + RetPolicy: rPolicy, + RetPeriod: rPeriod, + VerificationHash: vhash, + Verified: verified, + MsgNum: 0, + TotalBytes: 0, + CreatedOn: createdOn, + ACL: []string{}, } return mong.InsertResource("subscriptions", sub) } @@ -1320,6 +1361,9 @@ func (mong *MongoStore) QueryTotalMessagesPerProject(projectUUIDs []string, star days := 1 if !endDate.Equal(startDate) { days = int(endDate.Sub(startDate).Hours() / 24) + // add an extra day to compensate for the fact that we need the starting day included as well + // e.g. 
Aug 1 to Aug 31 should be calculated as 31 days and not as 30 + days += 1 } condQuery := []bson.M{ @@ -1382,8 +1426,6 @@ func (mong *MongoStore) QueryTotalMessagesPerProject(projectUUIDs []string, star ).Fatal(err.Error()) } - fmt.Printf("%+v\n", qdp) - return qdp, err } @@ -1563,7 +1605,7 @@ func (mong *MongoStore) ModAck(projectUUID string, name string, ack int) error { } // ModSubPush modifies the push configuration -func (mong *MongoStore) ModSubPush(projectUUID string, name string, push string, maxMessages int64, rPolicy string, rPeriod int, vhash string, verified bool) error { +func (mong *MongoStore) ModSubPush(projectUUID string, name string, push string, authzType string, authzValue string, maxMessages int64, rPolicy string, rPeriod int, vhash string, verified bool) error { db := mong.Session.DB(mong.Database) c := db.C("subscriptions") @@ -1572,12 +1614,14 @@ func (mong *MongoStore) ModSubPush(projectUUID string, name string, push string, "name": name, }, bson.M{"$set": bson.M{ - "push_endpoint": push, - "max_messages": maxMessages, - "retry_policy": rPolicy, - "retry_period": rPeriod, - "verification_hash": vhash, - "verified": verified, + "push_endpoint": push, + "authorization_type": authzType, + "authorization_header": authzValue, + "max_messages": maxMessages, + "retry_policy": rPolicy, + "retry_period": rPeriod, + "verification_hash": vhash, + "verified": verified, }, }) return err diff --git a/stores/query_models.go b/stores/query_models.go index f29d6494..a9917b76 100644 --- a/stores/query_models.go +++ b/stores/query_models.go @@ -6,24 +6,28 @@ import ( // QSub are the results of the Qsub query type QSub struct { - ID interface{} `bson:"_id,omitempty"` - ProjectUUID string `bson:"project_uuid"` - Name string `bson:"name"` - Topic string `bson:"topic"` - Offset int64 `bson:"offset"` - NextOffset int64 `bson:"next_offset"` - PendingAck string `bson:"pending_ack"` - PushEndpoint string `bson:"push_endpoint"` - MaxMessages int64 `bson:"max_messages"` 
- Ack int `bson:"ack"` - RetPolicy string `bson:"retry_policy"` - RetPeriod int `bson:"retry_period"` - MsgNum int64 `bson:"msg_num"` - TotalBytes int64 `bson:"total_bytes"` - VerificationHash string `bson:"verification_hash"` - Verified bool `bson:"verified"` - LatestConsume time.Time `bson:"latest_consume"` - ConsumeRate float64 `bson:"consume_rate"` + ID interface{} `bson:"_id,omitempty"` + ProjectUUID string `bson:"project_uuid"` + Name string `bson:"name"` + Topic string `bson:"topic"` + Offset int64 `bson:"offset"` + NextOffset int64 `bson:"next_offset"` + PendingAck string `bson:"pending_ack"` + PushEndpoint string `bson:"push_endpoint"` + MaxMessages int64 `bson:"max_messages"` + AuthorizationType string `bson:"authorization_type"` + AuthorizationHeader string `bson:"authorization_header"` + Ack int `bson:"ack"` + RetPolicy string `bson:"retry_policy"` + RetPeriod int `bson:"retry_period"` + MsgNum int64 `bson:"msg_num"` + TotalBytes int64 `bson:"total_bytes"` + VerificationHash string `bson:"verification_hash"` + Verified bool `bson:"verified"` + LatestConsume time.Time `bson:"latest_consume"` + ConsumeRate float64 `bson:"consume_rate"` + CreatedOn time.Time `bson:"created_on"` + ACL []string `bson:"acl"` } // QAcl holds a list of authorized users queried from topic or subscription collections @@ -104,6 +108,8 @@ type QTopic struct { LatestPublish time.Time `bson:"latest_publish"` PublishRate float64 `bson:"publish_rate"` SchemaUUID string `bson:"schema_uuid"` + CreatedOn time.Time `bson:"created_on"` + ACL []string `bson:"acl"` } // QDailyTopicMsgCount holds information about the daily number of messages published to a topic diff --git a/stores/store.go b/stores/store.go index 4b62d61e..45b71cff 100644 --- a/stores/store.go +++ b/stores/store.go @@ -38,13 +38,13 @@ type Store interface { InsertUser(uuid string, projects []QProjectRoles, name string, firstName string, lastName string, org string, desc string, token string, email string, serviceRoles 
[]string, createdOn time.Time, modifiedOn time.Time, createdBy string) error InsertProject(uuid string, name string, createdOn time.Time, modifiedOn time.Time, createdBy string, description string) error InsertOpMetric(hostname string, cpu float64, mem float64) error - InsertTopic(projectUUID string, name string, schemaUUID string) error + InsertTopic(projectUUID string, name string, schemaUUID string, createdOn time.Time) error IncrementTopicMsgNum(projectUUID string, name string, num int64) error IncrementDailyTopicMsgCount(projectUUID string, topicName string, num int64, date time.Time) error IncrementTopicBytes(projectUUID string, name string, totalBytes int64) error IncrementSubBytes(projectUUID string, name string, totalBytes int64) error IncrementSubMsgNum(projectUUID string, name string, num int64) error - InsertSub(projectUUID string, name string, topic string, offest int64, maxMessages int64, ack int, push string, rPolicy string, rPeriod int, vhash string, verified bool) error + InsertSub(projectUUID string, name string, topic string, offest int64, maxMessages int64, authzType string, authzHeader string, ack int, push string, rPolicy string, rPeriod int, vhash string, verified bool, createdOn time.Time) error HasProject(name string) bool HasUsers(projectUUID string, users []string) (bool, []string) QueryOneSub(projectUUID string, name string) (QSub, error) @@ -56,7 +56,7 @@ type Store interface { UpdateSubOffset(projectUUID string, name string, offset int64) UpdateSubPull(projectUUID string, name string, offset int64, ts string) error UpdateSubOffsetAck(projectUUID string, name string, offset int64, ts string) error - ModSubPush(projectUUID string, name string, push string, maxMessages int64, rPolicy string, rPeriod int, vhash string, verified bool) error + ModSubPush(projectUUID string, name string, push string, authzType string, authzValue string, maxMessages int64, rPolicy string, rPeriod int, vhash string, verified bool) error QueryACL(projectUUID 
string, resource string, name string) (QAcl, error) ExistsInACL(projectUUID string, resource string, resourceName string, userUUID string) error ModACL(projectUUID string, resource string, name string, acl []string) error @@ -68,6 +68,9 @@ type Store interface { QuerySchemas(projectUUID, schemaUUID, name string) ([]QSchema, error) UpdateSchema(schemaUUID, name, schemaType, rawSchemaString string) error DeleteSchema(schemaUUID string) error + UsersCount(startDate, endDate time.Time) (int, error) + TopicsCount(startDate, endDate time.Time) (int, error) + SubscriptionsCount(startDate, endDate time.Time) (int, error) Clone() Store Close() } diff --git a/stores/store_test.go b/stores/store_test.go index 02eb3574..2676f4c3 100644 --- a/stores/store_test.go +++ b/stores/store_test.go @@ -19,17 +19,17 @@ func (suite *StoreTestSuite) TestMockStore() { suite.Equal("mockbase", store.Database) eTopList := []QTopic{ - {3, "argo_uuid", "topic4", 0, 0, time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0, ""}, - {2, "argo_uuid", "topic3", 0, 0, time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99, "schema_uuid_3"}, - {1, "argo_uuid", "topic2", 0, 0, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, "schema_uuid_1"}, - {0, "argo_uuid", "topic1", 0, 0, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, ""}, + {3, "argo_uuid", "topic4", 0, 0, time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0, "", time.Date(2020, 11, 19, 0, 0, 0, 0, time.Local), []string{}}, + {2, "argo_uuid", "topic3", 0, 0, time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99, "schema_uuid_3", time.Date(2020, 11, 20, 0, 0, 0, 0, time.Local), []string{}}, + {1, "argo_uuid", "topic2", 0, 0, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, "schema_uuid_1", time.Date(2020, 11, 21, 0, 0, 0, 0, time.Local), []string{}}, + {0, "argo_uuid", "topic1", 0, 0, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, "", time.Date(2020, 11, 22, 0, 0, 0, 0, time.Local), []string{}}, } eSubList := []QSub{ - {3, "argo_uuid", "sub4", "topic4", 
0, 0, "", "endpoint.foo", 1, 10, "linear", 300, 0, 0, "push-id-1", true, time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0}, - {2, "argo_uuid", "sub3", "topic3", 0, 0, "", "", 0, 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45}, - {1, "argo_uuid", "sub2", "topic2", 0, 0, "", "", 0, 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99}, - {0, "argo_uuid", "sub1", "topic1", 0, 0, "", "", 0, 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10}, + {3, "argo_uuid", "sub4", "topic4", 0, 0, "", "endpoint.foo", 1, "autogen", "auth-header-1", 10, "linear", 300, 0, 0, "push-id-1", true, time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0, time.Date(2020, 11, 22, 0, 0, 0, 0, time.Local), []string{}}, + {2, "argo_uuid", "sub3", "topic3", 0, 0, "", "", 0, "", "", 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, time.Date(2020, 11, 21, 0, 0, 0, 0, time.Local), []string{}}, + {1, "argo_uuid", "sub2", "topic2", 0, 0, "", "", 0, "", "", 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99, time.Date(2020, 11, 20, 0, 0, 0, 0, time.Local), []string{}}, + {0, "argo_uuid", "sub1", "topic1", 0, 0, "", "", 0, "", "", 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, time.Date(2020, 11, 19, 0, 0, 0, 0, time.Local), []string{}}, } // retrieve all topics tpList, ts1, pg1, _ := store.QueryTopics("argo_uuid", "", "", "", 0) @@ -39,8 +39,8 @@ func (suite *StoreTestSuite) TestMockStore() { // retrieve first 2 eTopList1st2 := []QTopic{ - {3, "argo_uuid", "topic4", 0, 0, time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0, ""}, - {2, "argo_uuid", "topic3", 0, 0, time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99, "schema_uuid_3"}, + {3, "argo_uuid", "topic4", 0, 0, time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0, "", time.Date(2020, 11, 19, 0, 0, 0, 0, time.Local), []string{}}, + {2, "argo_uuid", "topic3", 0, 0, time.Date(2019, 5, 7, 0, 0, 0, 
0, time.Local), 8.99, "schema_uuid_3", time.Date(2020, 11, 20, 0, 0, 0, 0, time.Local), []string{}}, } tpList2, ts2, pg2, _ := store.QueryTopics("argo_uuid", "", "", "", 2) suite.Equal(eTopList1st2, tpList2) @@ -49,7 +49,7 @@ func (suite *StoreTestSuite) TestMockStore() { // retrieve the last one eTopList3 := []QTopic{ - {0, "argo_uuid", "topic1", 0, 0, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, ""}, + {0, "argo_uuid", "topic1", 0, 0, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, "", time.Date(2020, 11, 22, 0, 0, 0, 0, time.Local), []string{}}, } tpList3, ts3, pg3, _ := store.QueryTopics("argo_uuid", "", "", "0", 1) suite.Equal(eTopList3, tpList3) @@ -58,7 +58,7 @@ func (suite *StoreTestSuite) TestMockStore() { // retrieve a single topic eTopList4 := []QTopic{ - {0, "argo_uuid", "topic1", 0, 0, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, ""}, + {0, "argo_uuid", "topic1", 0, 0, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, "", time.Date(2020, 11, 22, 0, 0, 0, 0, time.Local), []string{}}, } tpList4, ts4, pg4, _ := store.QueryTopics("argo_uuid", "", "topic1", "", 0) suite.Equal(eTopList4, tpList4) @@ -67,8 +67,8 @@ func (suite *StoreTestSuite) TestMockStore() { // retrieve user's topics eTopList5 := []QTopic{ - {1, "argo_uuid", "topic2", 0, 0, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, "schema_uuid_1"}, - {0, "argo_uuid", "topic1", 0, 0, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, ""}, + {1, "argo_uuid", "topic2", 0, 0, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, "schema_uuid_1", time.Date(2020, 11, 21, 0, 0, 0, 0, time.Local), []string{}}, + {0, "argo_uuid", "topic1", 0, 0, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, "", time.Date(2020, 11, 22, 0, 0, 0, 0, time.Local), []string{}}, } tpList5, ts5, pg5, _ := store.QueryTopics("argo_uuid", "uuid1", "", "", 0) suite.Equal(eTopList5, tpList5) @@ -77,7 +77,7 @@ func (suite *StoreTestSuite) TestMockStore() { // retrieve use's topic with pagination eTopList6 := 
[]QTopic{ - {1, "argo_uuid", "topic2", 0, 0, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, "schema_uuid_1"}, + {1, "argo_uuid", "topic2", 0, 0, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, "schema_uuid_1", time.Date(2020, 11, 21, 0, 0, 0, 0, time.Local), []string{}}, } tpList6, ts6, pg6, _ := store.QueryTopics("argo_uuid", "uuid1", "", "", 1) @@ -93,8 +93,8 @@ func (suite *StoreTestSuite) TestMockStore() { // retrieve first 2 subs eSubListFirstPage := []QSub{ - {3, "argo_uuid", "sub4", "topic4", 0, 0, "", "endpoint.foo", 1, 10, "linear", 300, 0, 0, "push-id-1", true, time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0}, - {2, "argo_uuid", "sub3", "topic3", 0, 0, "", "", 0, 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45}} + {3, "argo_uuid", "sub4", "topic4", 0, 0, "", "endpoint.foo", 1, "autogen", "auth-header-1", 10, "linear", 300, 0, 0, "push-id-1", true, time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0, time.Date(2020, 11, 22, 0, 0, 0, 0, time.Local), []string{}}, + {2, "argo_uuid", "sub3", "topic3", 0, 0, "", "", 0, "", "", 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, time.Date(2020, 11, 21, 0, 0, 0, 0, time.Local), []string{}}} subList2, ts2, pg2, err2 := store.QuerySubs("argo_uuid", "", "", "", 2) suite.Equal(eSubListFirstPage, subList2) @@ -103,8 +103,8 @@ func (suite *StoreTestSuite) TestMockStore() { // retrieve next 2 subs eSubListNextPage := []QSub{ - {1, "argo_uuid", "sub2", "topic2", 0, 0, "", "", 0, 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99}, - {0, "argo_uuid", "sub1", "topic1", 0, 0, "", "", 0, 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10}, + {1, "argo_uuid", "sub2", "topic2", 0, 0, "", "", 0, "", "", 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99, time.Date(2020, 11, 20, 0, 0, 0, 0, time.Local), []string{}}, + {0, "argo_uuid", "sub1", "topic1", 0, 0, "", "", 0, "", "", 10, 
"", 0, 0, 0, "", false, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, time.Date(2020, 11, 19, 0, 0, 0, 0, time.Local), []string{}}, } subList3, ts3, pg3, err3 := store.QuerySubs("argo_uuid", "", "", "1", 2) @@ -114,9 +114,9 @@ func (suite *StoreTestSuite) TestMockStore() { // retrieve user's subs eSubList4 := []QSub{ - {ID: 3, ProjectUUID: "argo_uuid", Name: "sub4", Topic: "topic4", Offset: 0, NextOffset: 0, PendingAck: "", PushEndpoint: "endpoint.foo", MaxMessages: 1, Ack: 10, RetPolicy: "linear", RetPeriod: 300, MsgNum: 0, TotalBytes: 0, VerificationHash: "push-id-1", Verified: true, LatestConsume: time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), ConsumeRate: 0}, - {ID: 2, ProjectUUID: "argo_uuid", Name: "sub3", Topic: "topic3", Offset: 0, NextOffset: 0, PendingAck: "", PushEndpoint: "", MaxMessages: 0, Ack: 10, RetPolicy: "", RetPeriod: 0, MsgNum: 0, TotalBytes: 0, LatestConsume: time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), ConsumeRate: 5.45}, - {ID: 1, ProjectUUID: "argo_uuid", Name: "sub2", Topic: "topic2", Offset: 0, NextOffset: 0, PendingAck: "", PushEndpoint: "", MaxMessages: 0, Ack: 10, RetPolicy: "", RetPeriod: 0, MsgNum: 0, TotalBytes: 0, LatestConsume: time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), ConsumeRate: 8.99}, + {ID: 3, ProjectUUID: "argo_uuid", Name: "sub4", Topic: "topic4", Offset: 0, NextOffset: 0, PendingAck: "", PushEndpoint: "endpoint.foo", MaxMessages: 1, AuthorizationType: "autogen", AuthorizationHeader: "auth-header-1", Ack: 10, RetPolicy: "linear", RetPeriod: 300, MsgNum: 0, TotalBytes: 0, VerificationHash: "push-id-1", Verified: true, LatestConsume: time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), ConsumeRate: 0, CreatedOn: time.Date(2020, 11, 22, 0, 0, 0, 0, time.Local), ACL: []string{}}, + {ID: 2, ProjectUUID: "argo_uuid", Name: "sub3", Topic: "topic3", Offset: 0, NextOffset: 0, PendingAck: "", PushEndpoint: "", MaxMessages: 0, Ack: 10, RetPolicy: "", RetPeriod: 0, MsgNum: 0, TotalBytes: 0, LatestConsume: time.Date(2019, 5, 8, 0, 0, 0, 
0, time.Local), ConsumeRate: 5.45, CreatedOn: time.Date(2020, 11, 21, 0, 0, 0, 0, time.Local), ACL: []string{}}, + {ID: 1, ProjectUUID: "argo_uuid", Name: "sub2", Topic: "topic2", Offset: 0, NextOffset: 0, PendingAck: "", PushEndpoint: "", MaxMessages: 0, Ack: 10, RetPolicy: "", RetPeriod: 0, MsgNum: 0, TotalBytes: 0, LatestConsume: time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), ConsumeRate: 8.99, CreatedOn: time.Date(2020, 11, 20, 0, 0, 0, 0, time.Local), ACL: []string{}}, } subList4, ts4, pg4, err4 := store.QuerySubs("argo_uuid", "uuid1", "", "", 0) @@ -127,8 +127,8 @@ func (suite *StoreTestSuite) TestMockStore() { // retrieve user's subs eSubList5 := []QSub{ - {ID: 3, ProjectUUID: "argo_uuid", Name: "sub4", Topic: "topic4", Offset: 0, NextOffset: 0, PendingAck: "", PushEndpoint: "endpoint.foo", MaxMessages: 1, Ack: 10, RetPolicy: "linear", RetPeriod: 300, MsgNum: 0, TotalBytes: 0, VerificationHash: "push-id-1", Verified: true, LatestConsume: time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), ConsumeRate: 0}, - {ID: 2, ProjectUUID: "argo_uuid", Name: "sub3", Topic: "topic3", Offset: 0, NextOffset: 0, PendingAck: "", PushEndpoint: "", MaxMessages: 0, Ack: 10, RetPolicy: "", RetPeriod: 0, MsgNum: 0, TotalBytes: 0, LatestConsume: time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), ConsumeRate: 5.45}, + {ID: 3, ProjectUUID: "argo_uuid", Name: "sub4", Topic: "topic4", Offset: 0, NextOffset: 0, PendingAck: "", PushEndpoint: "endpoint.foo", MaxMessages: 1, AuthorizationType: "autogen", AuthorizationHeader: "auth-header-1", Ack: 10, RetPolicy: "linear", RetPeriod: 300, MsgNum: 0, TotalBytes: 0, VerificationHash: "push-id-1", Verified: true, LatestConsume: time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), ConsumeRate: 0, CreatedOn: time.Date(2020, 11, 22, 0, 0, 0, 0, time.Local), ACL: []string{}}, + {ID: 2, ProjectUUID: "argo_uuid", Name: "sub3", Topic: "topic3", Offset: 0, NextOffset: 0, PendingAck: "", PushEndpoint: "", MaxMessages: 0, Ack: 10, RetPolicy: "", RetPeriod: 0, MsgNum: 0, 
TotalBytes: 0, LatestConsume: time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), ConsumeRate: 5.45, CreatedOn: time.Date(2020, 11, 21, 0, 0, 0, 0, time.Local), ACL: []string{}}, } subList5, ts5, pg5, err5 := store.QuerySubs("argo_uuid", "uuid1", "", "", 2) @@ -161,6 +161,8 @@ func (suite *StoreTestSuite) TestMockStore() { TotalBytes: 0, LatestConsume: time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), ConsumeRate: 10, + CreatedOn: time.Date(2020, 11, 19, 0, 0, 0, 0, time.Local), + ACL: []string{}, }, }, subListByTopic) suite.Nil(errSublistByTopic) @@ -199,23 +201,23 @@ func (suite *StoreTestSuite) TestMockStore() { suite.Equal(true, store.HasResourceRoles("topics:list_all", []string{"publisher"})) suite.Equal(true, store.HasResourceRoles("topics:publish", []string{"publisher"})) - store.InsertTopic("argo_uuid", "topicFresh", "") - store.InsertSub("argo_uuid", "subFresh", "topicFresh", 0, 0, 10, "", "", 0, "", false) + store.InsertTopic("argo_uuid", "topicFresh", "", time.Date(2020, 9, 11, 0, 0, 0, 0, time.Local)) + store.InsertSub("argo_uuid", "subFresh", "topicFresh", 0, 0, "", "", 10, "", "", 0, "", false, time.Date(2020, 12, 19, 0, 0, 0, 0, time.Local)) eTopList2 := []QTopic{ - {4, "argo_uuid", "topicFresh", 0, 0, time.Time{}, 0, ""}, - {3, "argo_uuid", "topic4", 0, 0, time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0, ""}, - {2, "argo_uuid", "topic3", 0, 0, time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99, "schema_uuid_3"}, - {1, "argo_uuid", "topic2", 0, 0, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, "schema_uuid_1"}, - {0, "argo_uuid", "topic1", 0, 0, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, ""}, + {4, "argo_uuid", "topicFresh", 0, 0, time.Time{}, 0, "", time.Date(2020, 9, 11, 0, 0, 0, 0, time.Local), []string{}}, + {3, "argo_uuid", "topic4", 0, 0, time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0, "", time.Date(2020, 11, 19, 0, 0, 0, 0, time.Local), []string{}}, + {2, "argo_uuid", "topic3", 0, 0, time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99, 
"schema_uuid_3", time.Date(2020, 11, 20, 0, 0, 0, 0, time.Local), []string{}}, + {1, "argo_uuid", "topic2", 0, 0, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, "schema_uuid_1", time.Date(2020, 11, 21, 0, 0, 0, 0, time.Local), []string{}}, + {0, "argo_uuid", "topic1", 0, 0, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, "", time.Date(2020, 11, 22, 0, 0, 0, 0, time.Local), []string{}}, } eSubList2 := []QSub{ - {4, "argo_uuid", "subFresh", "topicFresh", 0, 0, "", "", 0, 10, "", 0, 0, 0, "", false, time.Time{}, 0}, - {3, "argo_uuid", "sub4", "topic4", 0, 0, "", "endpoint.foo", 1, 10, "linear", 300, 0, 0, "push-id-1", true, time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0}, - {2, "argo_uuid", "sub3", "topic3", 0, 0, "", "", 0, 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45}, - {1, "argo_uuid", "sub2", "topic2", 0, 0, "", "", 0, 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99}, - {0, "argo_uuid", "sub1", "topic1", 0, 0, "", "", 0, 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10}} + {4, "argo_uuid", "subFresh", "topicFresh", 0, 0, "", "", 0, "", "", 10, "", 0, 0, 0, "", false, time.Time{}, 0, time.Date(2020, 12, 19, 0, 0, 0, 0, time.Local), []string{}}, + {3, "argo_uuid", "sub4", "topic4", 0, 0, "", "endpoint.foo", 1, "autogen", "auth-header-1", 10, "linear", 300, 0, 0, "push-id-1", true, time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0, time.Date(2020, 11, 22, 0, 0, 0, 0, time.Local), []string{}}, + {2, "argo_uuid", "sub3", "topic3", 0, 0, "", "", 0, "", "", 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, time.Date(2020, 11, 21, 0, 0, 0, 0, time.Local), []string{}}, + {1, "argo_uuid", "sub2", "topic2", 0, 0, "", "", 0, "", "", 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99, time.Date(2020, 11, 20, 0, 0, 0, 0, time.Local), []string{}}, + {0, "argo_uuid", "sub1", "topic1", 0, 0, "", "", 0, "", "", 10, "", 0, 
0, 0, "", false, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, time.Date(2020, 11, 19, 0, 0, 0, 0, time.Local), []string{}}} tpList, _, _, _ = store.QueryTopics("argo_uuid", "", "", "", 0) suite.Equal(eTopList2, tpList) @@ -239,7 +241,7 @@ func (suite *StoreTestSuite) TestMockStore() { suite.Equal("not found", err.Error()) sb, err := store.QueryOneSub("argo_uuid", "sub1") - esb := QSub{0, "argo_uuid", "sub1", "topic1", 0, 0, "", "", 0, 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10} + esb := QSub{0, "argo_uuid", "sub1", "topic1", 0, 0, "", "", 0, "", "", 10, "", 0, 0, 0, "", false, time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, time.Date(2020, 11, 19, 0, 0, 0, 0, time.Local), []string{}} suite.Equal(esb, sb) // Test modify ack deadline in store @@ -248,7 +250,7 @@ func (suite *StoreTestSuite) TestMockStore() { suite.Equal(66, subAck.Ack) // Test mod push sub - e1 := store.ModSubPush("argo_uuid", "sub1", "example.com", 3, "linear", 400, "hash-1", true) + e1 := store.ModSubPush("argo_uuid", "sub1", "example.com", "autogen", "auth-h-1", 3, "linear", 400, "hash-1", true) sub1, _ := store.QueryOneSub("argo_uuid", "sub1") suite.Nil(e1) suite.Equal("example.com", sub1.PushEndpoint) @@ -256,9 +258,11 @@ func (suite *StoreTestSuite) TestMockStore() { suite.Equal("linear", sub1.RetPolicy) suite.Equal(400, sub1.RetPeriod) suite.Equal("hash-1", sub1.VerificationHash) + suite.Equal("autogen", sub1.AuthorizationType) + suite.Equal("auth-h-1", sub1.AuthorizationHeader) suite.True(sub1.Verified) - e2 := store.ModSubPush("argo_uuid", "unknown", "", 0, "", 0, "", false) + e2 := store.ModSubPush("argo_uuid", "unknown", "", "", "", 0, "", 0, "", false) suite.Equal("not found", e2.Error()) // exists in acl @@ -506,7 +510,7 @@ func (suite *StoreTestSuite) TestMockStore() { suite.Equal(8, qUsers2[0].ID) suite.Equal(7, qUsers2[1].ID) suite.Equal("6", pg2) - suite.Equal(int32(9), ts2) + suite.Equal(int32(2), ts2) suite.Equal(0, len(qUsers3)) 
suite.Equal("", pg3) @@ -515,7 +519,7 @@ func (suite *StoreTestSuite) TestMockStore() { suite.Equal(4, qUsers4[0].ID) suite.Equal(3, qUsers4[1].ID) suite.Equal("2", pg4) - suite.Equal(int32(9), ts4) + suite.Equal(int32(2), ts4) // test update topic latest publish time e1ulp := store2.UpdateTopicLatestPublish("argo_uuid", "topic1", time.Date(2019, 8, 8, 0, 0, 0, 0, time.Local)) @@ -543,7 +547,7 @@ func (suite *StoreTestSuite) TestMockStore() { // test QueryTotalMessagesPerProject expectedQpmc := []QProjectMessageCount{ - {ProjectUUID: "argo_uuid", NumberOfMessages: 30, AverageDailyMessages: 10}, + {ProjectUUID: "argo_uuid", NumberOfMessages: 30, AverageDailyMessages: 7}, } qpmc, qpmcerr1 := store2.QueryTotalMessagesPerProject([]string{"argo_uuid"}, time.Date(2018, 10, 1, 0, 0, 0, 0, time.UTC), time.Date(2018, 10, 4, 0, 0, 0, 0, time.UTC)) suite.Equal(expectedQpmc, qpmc) @@ -632,6 +636,15 @@ func (suite *StoreTestSuite) TestMockStore() { ur2, _ := store.QueryRegistrations("ur-uuid1", "accepted", "", "", "", "") suite.Equal(expur2, ur2) + sdate := time.Date(2008, 11, 19, 8, 0, 0, 0, time.Local) + edate := time.Date(2020, 11, 21, 6, 0, 0, 0, time.Local) + tc, _ := store3.TopicsCount(sdate, edate) + sc, _ := store3.SubscriptionsCount(sdate, edate) + uc, _ := store2.UsersCount(sdate, edate) + + suite.Equal(3, tc) + suite.Equal(3, sc) + suite.Equal(9, uc) } func TestStoresTestSuite(t *testing.T) { diff --git a/subscriptions/subscription.go b/subscriptions/subscription.go index 6e5dca78..296a9050 100644 --- a/subscriptions/subscription.go +++ b/subscriptions/subscription.go @@ -18,9 +18,12 @@ import ( ) const ( - LinearRetryPolicyType = "linear" - SlowStartRetryPolicyType = "slowstart" - UnSupportedRetryPolicyError = `Retry policy can only be of 'linear' or 'slowstart' type` + LinearRetryPolicyType = "linear" + SlowStartRetryPolicyType = "slowstart" + AutoGenerationAuthorizationHeader = "autogen" + DisabledAuthorizationHeader = "disabled" + UnSupportedRetryPolicyError = 
`Retry policy can only be of 'linear' or 'slowstart' type` + UnSupportedAuthorizationHeader = `Authorization header type can only be of 'autogen' or 'disabled' type` ) var supportedRetryPolicyTypes = []string{ @@ -28,6 +31,11 @@ var supportedRetryPolicyTypes = []string{ SlowStartRetryPolicyType, } +var supportedAuthorizationHeaderTypes = []string{ + AutoGenerationAuthorizationHeader, + DisabledAuthorizationHeader, +} + // Subscription struct to hold information for a given topic type Subscription struct { ProjectUUID string `json:"-"` @@ -41,17 +49,19 @@ type Subscription struct { NextOffset int64 `json:"-"` PendingAck string `json:"-"` PushStatus string `json:"push_status,omitempty"` + CreatedOn string `json:"created_on"` LatestConsume time.Time `json:"-"` ConsumeRate float64 `json:"-"` } // PushConfig holds optional configuration for push operations type PushConfig struct { - Pend string `json:"pushEndpoint"` - MaxMessages int64 `json:"maxMessages"` - RetPol RetryPolicy `json:"retryPolicy"` - VerificationHash string `json:"verification_hash"` - Verified bool `json:"verified"` + Pend string `json:"pushEndpoint"` + MaxMessages int64 `json:"maxMessages"` + AuthorizationHeader AuthorizationHeader `json:"authorization_header"` + RetPol RetryPolicy `json:"retryPolicy"` + VerificationHash string `json:"verification_hash"` + Verified bool `json:"verified"` } // SubMetrics holds the subscription's metric details @@ -68,6 +78,11 @@ type RetryPolicy struct { Period int `json:"period,omitempty"` } +type AuthorizationHeader struct { + Type string `json:"type,omitempty"` + Value string `json:"value,omitempty"` +} + // PaginatedSubscriptions holds information about a subscriptions' page and how to access the next page type PaginatedSubscriptions struct { Subscriptions []Subscription `json:"subscriptions"` @@ -124,6 +139,17 @@ func IsRetryPolicySupported(retPol string) bool { return false } +// IsAuthorizationHeaderTypeSupported checks if the provided authorization header type 
is supported by the service +func IsAuthorizationHeaderTypeSupported(authzType string) bool { + + for _, aht := range supportedAuthorizationHeaderTypes { + if authzType == aht { + return true + } + } + return false +} + // FindMetric returns the metric of a specific subscription func FindMetric(projectUUID string, name string, store stores.Store) (SubMetrics, error) { result := SubMetrics{MsgNum: 0} @@ -287,7 +313,9 @@ func VerifyPushEndpoint(sub Subscription, c *http.Client, store stores.Store) er } // update the push config with verified true - err = ModSubPush(sub.ProjectUUID, sub.Name, sub.PushCfg.Pend, sub.PushCfg.MaxMessages, sub.PushCfg.RetPol.PolicyType, sub.PushCfg.RetPol.Period, sub.PushCfg.VerificationHash, true, store) + err = ModSubPush(sub.ProjectUUID, sub.Name, sub.PushCfg.Pend, sub.PushCfg.AuthorizationHeader.Type, + sub.PushCfg.AuthorizationHeader.Value, sub.PushCfg.MaxMessages, sub.PushCfg.RetPol.PolicyType, + sub.PushCfg.RetPol.Period, sub.PushCfg.VerificationHash, true, store) if err != nil { return err } @@ -327,6 +355,7 @@ func Find(projectUUID, userUUID, name, pageToken string, pageSize int32, store s curSub.Offset = item.Offset curSub.NextOffset = item.NextOffset curSub.Ack = item.Ack + curSub.CreatedOn = item.CreatedOn.Format("2006-01-02T15:04:05Z") if item.PushEndpoint != "" { rp := RetryPolicy{ PolicyType: item.RetPolicy, @@ -341,12 +370,18 @@ func Find(projectUUID, userUUID, name, pageToken string, pageSize int32, store s maxM = item.MaxMessages } + authzCFG := AuthorizationHeader{ + Type: item.AuthorizationType, + Value: item.AuthorizationHeader, + } + curSub.PushCfg = PushConfig{ - Pend: item.PushEndpoint, - MaxMessages: maxM, - RetPol: rp, - VerificationHash: item.VerificationHash, - Verified: item.Verified, + Pend: item.PushEndpoint, + MaxMessages: maxM, + AuthorizationHeader: authzCFG, + RetPol: rp, + VerificationHash: item.VerificationHash, + Verified: item.Verified, } } curSub.LatestConsume = item.LatestConsume @@ -398,7 +433,7 @@ 
func LoadPushSubs(store stores.Store) PaginatedSubscriptions { } // CreateSub creates a new subscription -func CreateSub(projectUUID string, name string, topic string, push string, offset int64, maxMessages int64, ack int, retPolicy string, retPeriod int, vhash string, verified bool, store stores.Store) (Subscription, error) { +func CreateSub(projectUUID string, name string, topic string, push string, offset int64, maxMessages int64, authzType string, authzHeader string, ack int, retPolicy string, retPeriod int, vhash string, verified bool, createdOn time.Time, store stores.Store) (Subscription, error) { if HasSub(projectUUID, name, store) { return Subscription{}, errors.New("exists") @@ -412,7 +447,7 @@ func CreateSub(projectUUID string, name string, topic string, push string, offse retPeriod = 0 } - err := store.InsertSub(projectUUID, name, topic, offset, maxMessages, ack, push, retPolicy, retPeriod, vhash, verified) + err := store.InsertSub(projectUUID, name, topic, offset, maxMessages, authzType, authzHeader, ack, push, retPolicy, retPeriod, vhash, verified, createdOn) if err != nil { return Subscription{}, errors.New("backend error") } @@ -440,7 +475,7 @@ func ModAck(projectUUID string, name string, ack int, store stores.Store) error } // ModSubPush updates the subscription push config -func ModSubPush(projectUUID string, name string, push string, maxMessages int64, retPolicy string, retPeriod int, vhash string, verified bool, store stores.Store) error { +func ModSubPush(projectUUID string, name string, push string, authzType string, authzValue string, maxMessages int64, retPolicy string, retPeriod int, vhash string, verified bool, store stores.Store) error { if HasSub(projectUUID, name, store) == false { return errors.New("not found") @@ -450,7 +485,7 @@ func ModSubPush(projectUUID string, name string, push string, maxMessages int64, retPeriod = 0 } - return store.ModSubPush(projectUUID, name, push, maxMessages, retPolicy, retPeriod, vhash, verified) + return 
store.ModSubPush(projectUUID, name, push, authzType, authzValue, maxMessages, retPolicy, retPeriod, vhash, verified) } // RemoveSub removes an existing subscription diff --git a/subscriptions/subscription_test.go b/subscriptions/subscription_test.go index 54312ebc..2864eafd 100644 --- a/subscriptions/subscription_test.go +++ b/subscriptions/subscription_test.go @@ -137,6 +137,7 @@ func (suite *SubTestSuite) TestGetSubByName() { expSub := New("argo_uuid", "ARGO", "sub1", "topic1") expSub.PushCfg.RetPol.PolicyType = "" expSub.PushCfg.RetPol.Period = 0 + expSub.CreatedOn = "2020-11-19T00:00:00Z" expSub.LatestConsume = time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local) expSub.ConsumeRate = 10 suite.Equal(expSub, result.Subscriptions[0]) @@ -177,28 +178,34 @@ func (suite *SubTestSuite) TestGetSubsByProject() { expSub1.PushCfg.MaxMessages = 0 expSub1.LatestConsume = time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local) expSub1.ConsumeRate = 10 + expSub1.CreatedOn = "2020-11-19T00:00:00Z" expSub2 := New("argo_uuid", "ARGO", "sub2", "topic2") expSub2.PushCfg.RetPol.PolicyType = "" expSub2.PushCfg.RetPol.Period = 0 expSub2.PushCfg.MaxMessages = 0 expSub2.LatestConsume = time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local) expSub2.ConsumeRate = 8.99 + expSub2.CreatedOn = "2020-11-20T00:00:00Z" expSub3 := New("argo_uuid", "ARGO", "sub3", "topic3") expSub3.PushCfg.RetPol.PolicyType = "" expSub3.PushCfg.RetPol.Period = 0 expSub3.PushCfg.MaxMessages = 0 expSub3.LatestConsume = time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local) expSub3.ConsumeRate = 5.45 + expSub3.CreatedOn = "2020-11-21T00:00:00Z" expSub4 := New("argo_uuid", "ARGO", "sub4", "topic4") expSub4.PushCfg.RetPol.PolicyType = "linear" expSub4.PushCfg.RetPol.Period = 300 + expSub4.CreatedOn = "2020-11-22T00:00:00Z" rp := RetryPolicy{"linear", 300} + authCFG := AuthorizationHeader{"autogen", "auth-header-1"} expSub4.PushCfg = PushConfig{ - Pend: "endpoint.foo", - RetPol: rp, - VerificationHash: "push-id-1", - Verified: true, - MaxMessages: 1, + 
Pend: "endpoint.foo", + AuthorizationHeader: authCFG, + RetPol: rp, + VerificationHash: "push-id-1", + Verified: true, + MaxMessages: 1, } expSub4.LatestConsume = time.Date(0, 0, 0, 0, 0, 0, 0, time.Local) expSub4.ConsumeRate = 0 @@ -274,28 +281,34 @@ func (suite *SubTestSuite) TestLoadFromCfg() { expSub1.PushCfg.MaxMessages = 0 expSub1.LatestConsume = time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local) expSub1.ConsumeRate = 10 + expSub1.CreatedOn = "2020-11-19T00:00:00Z" expSub2 := New("argo_uuid", "ARGO", "sub2", "topic2") expSub2.PushCfg.RetPol.PolicyType = "" expSub2.PushCfg.RetPol.Period = 0 expSub2.PushCfg.MaxMessages = 0 expSub2.LatestConsume = time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local) expSub2.ConsumeRate = 8.99 + expSub2.CreatedOn = "2020-11-20T00:00:00Z" expSub3 := New("argo_uuid", "ARGO", "sub3", "topic3") expSub3.PushCfg.RetPol.PolicyType = "" expSub3.PushCfg.RetPol.Period = 0 expSub3.PushCfg.MaxMessages = 0 expSub3.LatestConsume = time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local) expSub3.ConsumeRate = 5.45 + expSub3.CreatedOn = "2020-11-21T00:00:00Z" expSub4 := New("argo_uuid", "ARGO", "sub4", "topic4") expSub4.PushCfg.RetPol.PolicyType = "linear" expSub4.PushCfg.RetPol.Period = 300 + authCFG := AuthorizationHeader{"autogen", "auth-header-1"} + expSub4.CreatedOn = "2020-11-22T00:00:00Z" rp := RetryPolicy{"linear", 300} expSub4.PushCfg = PushConfig{ - Pend: "endpoint.foo", - RetPol: rp, - VerificationHash: "push-id-1", - Verified: true, - MaxMessages: 1, + Pend: "endpoint.foo", + AuthorizationHeader: authCFG, + RetPol: rp, + VerificationHash: "push-id-1", + Verified: true, + MaxMessages: 1, } expSub4.LatestConsume = time.Date(0, 0, 0, 0, 0, 0, 0, time.Local) expSub4.ConsumeRate = 0 @@ -308,6 +321,12 @@ func (suite *SubTestSuite) TestLoadFromCfg() { } +func (suite *SubTestSuite) TestIsAuthzTypeSupported() { + suite.True(IsAuthorizationHeaderTypeSupported("autogen")) + suite.True(IsAuthorizationHeaderTypeSupported("disabled")) + 
suite.False(IsAuthorizationHeaderTypeSupported("unknown")) +} + func (suite *SubTestSuite) TestIsRetPolSupported() { suite.True(IsRetryPolicySupported("linear")) suite.False(IsRetryPolicySupported("unknown")) @@ -335,12 +354,13 @@ func (suite *SubTestSuite) TestCreateSubStore() { store := stores.NewMockStore(APIcfg.StoreHost, APIcfg.StoreDB) - sub, err := CreateSub("argo_uuid", "sub1", "topic1", "", 0, 0, 0, "linear", 300, "", true, store) + sub, err := CreateSub("argo_uuid", "sub1", "topic1", "", 0, 0, "", "", 0, "linear", 300, "", true, time.Date(2019, 7, 7, 0, 0, 0, 0, time.Local), store) suite.Equal(Subscription{}, sub) suite.Equal("exists", err.Error()) - sub2, err2 := CreateSub("argo_uuid", "subNew", "topicNew", "", 0, 0, 0, "linear", 300, "", true, store) + sub2, err2 := CreateSub("argo_uuid", "subNew", "topicNew", "", 0, 0, "", "", 0, "linear", 300, "", true, time.Date(2019, 7, 7, 0, 0, 0, 0, time.Local), store) expSub := New("argo_uuid", "ARGO", "subNew", "topicNew") + expSub.CreatedOn = "2019-07-07T00:00:00Z" suite.Equal(expSub, sub2) suite.Equal(nil, err2) @@ -374,7 +394,7 @@ func (suite *SubTestSuite) TestModSubPush() { store := stores.NewMockStore(APIcfg.StoreHost, APIcfg.StoreDB) // modify push config - err1 := ModSubPush("argo_uuid", "sub1", "example.com", 2, "linear", 400, "hash-1", true, store) + err1 := ModSubPush("argo_uuid", "sub1", "example.com", "autogen", "auth-h", 2, "linear", 400, "hash-1", true, store) suite.Nil(err1) @@ -387,7 +407,7 @@ func (suite *SubTestSuite) TestModSubPush() { suite.True(sub1.Verified) // test error case - err2 := ModSubPush("argo_uuid", "unknown", "", 0, "", 0, "", false, store) + err2 := ModSubPush("argo_uuid", "unknown", "", "", "", 0, "", 0, "", false, store) suite.Equal("not found", err2.Error()) } @@ -508,11 +528,13 @@ func (suite *SubTestSuite) TestExportJson() { "pushConfig": { "pushEndpoint": "", "maxMessages": 0, + "authorization_header": {}, "retryPolicy": {}, "verification_hash": "", "verified": false }, - 
"ackDeadlineSeconds": 10 + "ackDeadlineSeconds": 10, + "created_on": "2020-11-19T00:00:00Z" }` suite.Equal(expJSON, outJSON) @@ -524,6 +546,10 @@ func (suite *SubTestSuite) TestExportJson() { "pushConfig": { "pushEndpoint": "endpoint.foo", "maxMessages": 1, + "authorization_header": { + "type": "autogen", + "value": "auth-header-1" + }, "retryPolicy": { "type": "linear", "period": 300 @@ -531,7 +557,8 @@ func (suite *SubTestSuite) TestExportJson() { "verification_hash": "push-id-1", "verified": true }, - "ackDeadlineSeconds": 10 + "ackDeadlineSeconds": 10, + "created_on": "2020-11-22T00:00:00Z" }, { "name": "/projects/ARGO/subscriptions/sub3", @@ -539,11 +566,13 @@ func (suite *SubTestSuite) TestExportJson() { "pushConfig": { "pushEndpoint": "", "maxMessages": 0, + "authorization_header": {}, "retryPolicy": {}, "verification_hash": "", "verified": false }, - "ackDeadlineSeconds": 10 + "ackDeadlineSeconds": 10, + "created_on": "2020-11-21T00:00:00Z" }, { "name": "/projects/ARGO/subscriptions/sub2", @@ -551,11 +580,13 @@ func (suite *SubTestSuite) TestExportJson() { "pushConfig": { "pushEndpoint": "", "maxMessages": 0, + "authorization_header": {}, "retryPolicy": {}, "verification_hash": "", "verified": false }, - "ackDeadlineSeconds": 10 + "ackDeadlineSeconds": 10, + "created_on": "2020-11-20T00:00:00Z" }, { "name": "/projects/ARGO/subscriptions/sub1", @@ -563,11 +594,13 @@ func (suite *SubTestSuite) TestExportJson() { "pushConfig": { "pushEndpoint": "", "maxMessages": 0, + "authorization_header": {}, "retryPolicy": {}, "verification_hash": "", "verified": false }, - "ackDeadlineSeconds": 10 + "ackDeadlineSeconds": 10, + "created_on": "2020-11-19T00:00:00Z" } ], "nextPageToken": "", diff --git a/topics/topic.go b/topics/topic.go index 109f0175..a5228837 100644 --- a/topics/topic.go +++ b/topics/topic.go @@ -20,6 +20,7 @@ type Topic struct { LatestPublish time.Time `json:"-"` PublishRate float64 `json:"-"` Schema string `json:"schema,omitempty"` + CreatedOn string 
`json:"created_on"` } type TopicMetrics struct { @@ -110,6 +111,7 @@ func Find(projectUUID, userUUID, name, pageToken string, pageSize int32, store s curTop := New(item.ProjectUUID, projectName, item.Name) curTop.LatestPublish = item.LatestPublish curTop.PublishRate = item.PublishRate + curTop.CreatedOn = item.CreatedOn.Format("2006-01-02T15:04:05Z") if item.SchemaUUID != "" { sl, err := schemas.Find(projectUUID, item.SchemaUUID, "", store) @@ -159,13 +161,13 @@ func (tl *PaginatedTopics) ExportJSON() (string, error) { } // CreateTopic creates a new topic -func CreateTopic(projectUUID string, name string, schemaUUID string, store stores.Store) (Topic, error) { +func CreateTopic(projectUUID string, name string, schemaUUID string, createdOn time.Time, store stores.Store) (Topic, error) { if HasTopic(projectUUID, name, store) { return Topic{}, errors.New("exists") } - err := store.InsertTopic(projectUUID, name, schemaUUID) + err := store.InsertTopic(projectUUID, name, schemaUUID, createdOn) if err != nil { return Topic{}, errors.New("backend error") } diff --git a/topics/topic_test.go b/topics/topic_test.go index 659acb73..26ab06e3 100644 --- a/topics/topic_test.go +++ b/topics/topic_test.go @@ -41,6 +41,7 @@ func (suite *TopicTestSuite) TestGetTopicByName() { expTopic := New("argo_uuid", "ARGO", "topic1") expTopic.PublishRate = 10 expTopic.LatestPublish = time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local) + expTopic.CreatedOn = "2020-11-22T00:00:00Z" suite.Equal(expTopic, myTopics.Topics[0]) } @@ -50,23 +51,23 @@ func (suite *TopicTestSuite) TestGetPaginatedTopics() { // retrieve all topics expPt1 := PaginatedTopics{Topics: []Topic{ - {"argo_uuid", "topic4", "/projects/ARGO/topics/topic4", time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0, ""}, - {"argo_uuid", "topic3", "/projects/ARGO/topics/topic3", time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99, "projects/ARGO/schemas/schema-3"}, - {"argo_uuid", "topic2", "/projects/ARGO/topics/topic2", time.Date(2019, 5, 8, 0, 0, 0, 
0, time.Local), 5.45, "projects/ARGO/schemas/schema-1"}, - {"argo_uuid", "topic1", "/projects/ARGO/topics/topic1", time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, ""}}, + {"argo_uuid", "topic4", "/projects/ARGO/topics/topic4", time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0, "", "2020-11-19T00:00:00Z"}, + {"argo_uuid", "topic3", "/projects/ARGO/topics/topic3", time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99, "projects/ARGO/schemas/schema-3", "2020-11-20T00:00:00Z"}, + {"argo_uuid", "topic2", "/projects/ARGO/topics/topic2", time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, "projects/ARGO/schemas/schema-1", "2020-11-21T00:00:00Z"}, + {"argo_uuid", "topic1", "/projects/ARGO/topics/topic1", time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, "", "2020-11-22T00:00:00Z"}}, NextPageToken: "", TotalSize: 4} pgTopics1, err1 := Find("argo_uuid", "", "", "", 0, store) // retrieve first 2 topics expPt2 := PaginatedTopics{Topics: []Topic{ - {"argo_uuid", "topic4", "/projects/ARGO/topics/topic4", time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0, ""}, - {"argo_uuid", "topic3", "/projects/ARGO/topics/topic3", time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99, "projects/ARGO/schemas/schema-3"}}, + {"argo_uuid", "topic4", "/projects/ARGO/topics/topic4", time.Date(0, 0, 0, 0, 0, 0, 0, time.Local), 0, "", "2020-11-19T00:00:00Z"}, + {"argo_uuid", "topic3", "/projects/ARGO/topics/topic3", time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), 8.99, "projects/ARGO/schemas/schema-3", "2020-11-20T00:00:00Z"}}, NextPageToken: "MQ==", TotalSize: 4} pgTopics2, err2 := Find("argo_uuid", "", "", "", 2, store) // retrieve the next topic expPt3 := PaginatedTopics{Topics: []Topic{ - {"argo_uuid", "topic1", "/projects/ARGO/topics/topic1", time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, ""}}, + {"argo_uuid", "topic1", "/projects/ARGO/topics/topic1", time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, "", "2020-11-22T00:00:00Z"}}, NextPageToken: "", TotalSize: 4} pgTopics3, err3 := Find("argo_uuid", 
"", "", "MA==", 1, store) @@ -75,14 +76,14 @@ func (suite *TopicTestSuite) TestGetPaginatedTopics() { // retrieve topics for a specific user expPt5 := PaginatedTopics{Topics: []Topic{ - {"argo_uuid", "topic2", "/projects/ARGO/topics/topic2", time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, "projects/ARGO/schemas/schema-1"}, - {"argo_uuid", "topic1", "/projects/ARGO/topics/topic1", time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, ""}}, + {"argo_uuid", "topic2", "/projects/ARGO/topics/topic2", time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, "projects/ARGO/schemas/schema-1", "2020-11-21T00:00:00Z"}, + {"argo_uuid", "topic1", "/projects/ARGO/topics/topic1", time.Date(2019, 5, 6, 0, 0, 0, 0, time.Local), 10, "", "2020-11-22T00:00:00Z"}}, NextPageToken: "", TotalSize: 2} pgTopics5, err5 := Find("argo_uuid", "uuid1", "", "", 2, store) // retrieve topics for a specific user with pagination expPt6 := PaginatedTopics{Topics: []Topic{ - {"argo_uuid", "topic2", "/projects/ARGO/topics/topic2", time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, "projects/ARGO/schemas/schema-1"}}, + {"argo_uuid", "topic2", "/projects/ARGO/topics/topic2", time.Date(2019, 5, 8, 0, 0, 0, 0, time.Local), 5.45, "projects/ARGO/schemas/schema-1", "2020-11-21T00:00:00Z"}}, NextPageToken: "MA==", TotalSize: 2} pgTopics6, err6 := Find("argo_uuid", "uuid1", "", "", 1, store) @@ -129,13 +130,14 @@ func (suite *TopicTestSuite) TestCreateTopicStore() { store := stores.NewMockStore(APIcfg.StoreHost, APIcfg.StoreDB) - tp, err := CreateTopic("argo_uuid", "topic1", "", store) + tp, err := CreateTopic("argo_uuid", "topic1", "", time.Time{}, store) suite.Equal(Topic{}, tp) suite.Equal("exists", err.Error()) - tp2, err2 := CreateTopic("argo_uuid", "topicNew", "schema_uuid_1", store) + tp2, err2 := CreateTopic("argo_uuid", "topicNew", "schema_uuid_1", time.Date(2019, 5, 7, 0, 0, 0, 0, time.Local), store) expTopic := New("argo_uuid", "ARGO", "topicNew") expTopic.Schema = "projects/ARGO/schemas/schema-1" + 
expTopic.CreatedOn = "2019-05-07T00:00:00Z" suite.Equal(expTopic, tp2) suite.Equal(nil, err2) } @@ -172,25 +174,30 @@ func (suite *TopicTestSuite) TestExportJson() { topics, _ := Find("argo_uuid", "", "topic1", "", 0, store) outJSON, _ := topics.Topics[0].ExportJSON() expJSON := `{ - "name": "/projects/ARGO/topics/topic1" + "name": "/projects/ARGO/topics/topic1", + "created_on": "2020-11-22T00:00:00Z" }` suite.Equal(expJSON, outJSON) expJSON2 := `{ "topics": [ { - "name": "/projects/ARGO/topics/topic4" + "name": "/projects/ARGO/topics/topic4", + "created_on": "2020-11-19T00:00:00Z" }, { "name": "/projects/ARGO/topics/topic3", - "schema": "projects/ARGO/schemas/schema-3" + "schema": "projects/ARGO/schemas/schema-3", + "created_on": "2020-11-20T00:00:00Z" }, { "name": "/projects/ARGO/topics/topic2", - "schema": "projects/ARGO/schemas/schema-1" + "schema": "projects/ARGO/schemas/schema-1", + "created_on": "2020-11-21T00:00:00Z" }, { - "name": "/projects/ARGO/topics/topic1" + "name": "/projects/ARGO/topics/topic1", + "created_on": "2020-11-22T00:00:00Z" } ], "nextPageToken": "", diff --git a/validation.go b/validation.go deleted file mode 100644 index 1da73d59..00000000 --- a/validation.go +++ /dev/null @@ -1,34 +0,0 @@ -package main - -import ( - "regexp" - "strconv" - "strings" -) - -func validName(name string) bool { - r, _ := regexp.Compile("^[a-zA-Z0-9_-]+$") - return r.Match([]byte(name)) -} - -// validAckID checks the validity of an AckID string against a given project and subscription -func validAckID(project string, sub string, ackID string) bool { - - tokens := strings.Split(ackID, "/") - - if len(tokens) != 4 || tokens[0] != "projects" || tokens[1] != project || tokens[2] != "subscriptions" { - return false - } - - subTokens := strings.Split(tokens[3], ":") - if len(subTokens) != 2 || subTokens[0] != sub { - return false - } - _, err := strconv.ParseInt(subTokens[1], 10, 64) - if err != nil { - - return false - } - - return true -} diff --git 
a/validation/validation.go b/validation/validation.go new file mode 100644 index 00000000..a6433e61 --- /dev/null +++ b/validation/validation.go @@ -0,0 +1,50 @@ +package validation + +import ( + "net/url" + "regexp" + "strconv" + "strings" +) + +func ValidName(name string) bool { + r, _ := regexp.Compile("^[a-zA-Z0-9_-]+$") + return r.Match([]byte(name)) +} + +// ValidAckID checks the validity of an AckID string against a given project and subscription +func ValidAckID(project string, sub string, ackID string) bool { + + tokens := strings.Split(ackID, "/") + + if len(tokens) != 4 || tokens[0] != "projects" || tokens[1] != project || tokens[2] != "subscriptions" { + return false + } + + subTokens := strings.Split(tokens[3], ":") + if len(subTokens) != 2 || subTokens[0] != sub { + return false + } + _, err := strconv.ParseInt(subTokens[1], 10, 64) + if err != nil { + + return false + } + + return true +} + +// IsValidHTTPS checks if a url string is valid https url +func IsValidHTTPS(urlStr string) bool { + u, err := url.ParseRequestURI(urlStr) + if err != nil { + return false + } + // If a valid url is in form without slashes after scheme consider it invalid. 
+ // If a valid url doesn't have https as a scheme consider it invalid + if u.Host == "" || u.Scheme != "https" { + return false + } + + return true +} diff --git a/validation/validation_test.go b/validation/validation_test.go new file mode 100644 index 00000000..92bc23d0 --- /dev/null +++ b/validation/validation_test.go @@ -0,0 +1,47 @@ +package validation + +import ( + "github.com/stretchr/testify/suite" + "testing" +) + +type ValidationTestSuite struct { + suite.Suite +} + +func (suite *ValidationTestSuite) TestValidHTTPS() { + suite.Equal(false, IsValidHTTPS("ht")) + suite.Equal(false, IsValidHTTPS("www.example.com")) + suite.Equal(false, IsValidHTTPS("https:www.example.com")) + suite.Equal(false, IsValidHTTPS("http://www.example.com")) + suite.Equal(true, IsValidHTTPS("https://www.example.com")) + +} + +func (suite *ValidationTestSuite) TestValidation() { + // nameValidations + suite.Equal(true, ValidName("topic101")) + suite.Equal(true, ValidName("topic_101")) + suite.Equal(true, ValidName("topic_101_another_thing")) + suite.Equal(true, ValidName("topic___343_random")) + suite.Equal(true, ValidName("topic_dc1cc538-1361-4317-a235-0bf383d4a69f")) + suite.Equal(false, ValidName("topic_dc1cc538.1361-4317-a235-0bf383d4a69f")) + suite.Equal(false, ValidName("topic.not.valid")) + suite.Equal(false, ValidName("spaces are not valid")) + suite.Equal(false, ValidName("topic/A")) + suite.Equal(false, ValidName("topic/B")) + + // ackID validations + suite.Equal(true, ValidAckID("ARGO", "sub101", "projects/ARGO/subscriptions/sub101:5")) + suite.Equal(false, ValidAckID("ARGO", "sub101", "projects/ARGO/subscriptions/sub101:aaa")) + suite.Equal(false, ValidAckID("ARGO", "sub101", "projects/FARGO/subscriptions/sub101:5")) + suite.Equal(false, ValidAckID("ARGO", "sub101", "projects/ARGO/subscriptions/subF00:5")) + suite.Equal(false, ValidAckID("ARGO", "sub101", "falsepath/ARGO/subscriptions/sub101:5")) + suite.Equal(true, ValidAckID("FOO", "BAR", 
"projects/FOO/subscriptions/BAR:11155")) + suite.Equal(false, ValidAckID("FOO", "BAR", "projects/FOO//subscriptions/BAR:11155")) + +} + +func TestValidationTestSuite(t *testing.T) { + suite.Run(t, new(ValidationTestSuite)) +} diff --git a/version/version.go b/version/version.go index 47a263e3..1d32639c 100644 --- a/version/version.go +++ b/version/version.go @@ -8,7 +8,7 @@ import ( var ( // Release version of the service. Bump it up during new version release - Release = "1.0.7" + Release = "1.0.8" // Commit hash provided during build Commit = "Unknown" // BuildTime provided during build @@ -42,8 +42,6 @@ func LogInfo() { // Model struct holds version information about the binary build type Model struct { - Release string `xml:"release" json:"release"` - Commit string `xml:"commit" json:"commit"` BuildTime string `xml:"build_time" json:"build_time"` GO string `xml:"golang" json:"golang"` Compiler string `xml:"compiler" json:"compiler"`