...
 
Commits (11)
......@@ -12,22 +12,30 @@ import (
type AppError struct {
error error
Message string
Code int
message string
code int
}
func (e AppError) Error() string {
return fmt.Sprintf("%s (%s)", e.Message, e.error.Error())
return fmt.Sprintf("%s (%s)", e.message, e.error.Error())
}
func (err AppError) Handle(w http.ResponseWriter) {
// log.Println(err)
http.Error(w, err.Error(), err.Code)
// TODO: writeErrorResponse
writeGetResponse(w, nil, nil, err)
// http.Error(w, err.Error(), err.Code)
http.Error(w, "", err.code)
writeErrorResponse(w, err)
}
type dataGetter func(context.Context, map[string]string, params.QueryParams) ([]interface{}, error)
// NewError wraps err in an AppError carrying the user-facing message and
// the HTTP status code the handler should respond with.
func NewError(err error, msg string, status int) AppError {
	appErr := AppError{
		error:   err,
		message: msg,
		code:    status,
	}
	return appErr
}
// dataGetter fetches rows for a GET endpoint: it receives the user's DB
// context, the mux path variables, and the prepared query parameters
// (both plain string maps), and returns the result rows.
type dataGetter func(context.Context, map[string]string, map[string]string) ([]interface{}, error)
type App struct {
*db.DBOracle
......@@ -36,55 +44,57 @@ type App struct {
func (app *App) GetWrapper(getter dataGetter) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
qParams, err := params.InitQueryParams(r.URL.Query())
qParams, err := params.PrepParams(r.URL.Query())
if err != nil {
AppError{err, "invalid query parameters", http.StatusBadRequest}.Handle(w)
NewError(err, "invalid query parameters", http.StatusBadRequest).Handle(w)
return
}
username, password, _ := r.BasicAuth()
userCtx, err := app.Authenticate(username, password)
if err != nil {
AppError{err, "Failed to connect", http.StatusUnauthorized}.Handle(w)
NewError(err, "Failed to connect", http.StatusUnauthorized).Handle(w)
return
}
data, err := getter(userCtx, mux.Vars(r), qParams)
if err != nil {
AppError{err, "Failed to retrieve data", http.StatusInternalServerError}.Handle(w)
NewError(err, "Failed to retrieve data", http.StatusInternalServerError).Handle(w)
return
}
writeGetResponse(w, qParams.EncodePaging(r.URL, uint64(len(data))), data, nil)
writeGetResponse(w, params.GetPages(qParams, r.URL, len(data)), data)
}
}
type dataDecoder func(http.ResponseWriter, *http.Request) (db.Putabler, error)
type dataDecoder func(http.ResponseWriter, *http.Request) (db.Encodabler, error)
type dataPutter func(context.Context, map[string]string, map[string]string, db.Encodabler) error
func PutWrapper(app *App, decoder dataDecoder) http.HandlerFunc {
func PutWrapper(app *App, decoder dataDecoder, putter dataPutter) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
qParams, err := params.InitQueryParams(r.URL.Query())
qParams, err := params.PrepParams(r.URL.Query())
if err != nil {
AppError{err, "invalid query parameters", http.StatusBadRequest}.Handle(w)
NewError(err, "invalid query parameters", http.StatusBadRequest).Handle(w)
return
}
username, password, _ := r.BasicAuth()
userCtx, err := app.Authenticate(username, password)
if err != nil {
AppError{err, "Failed to connect", http.StatusUnauthorized}.Handle(w)
NewError(err, "Failed to connect", http.StatusUnauthorized).Handle(w)
return
}
data, err := decoder(w, r)
if err != nil {
// if err := target.Decode(w, r); err != nil {
AppError{err, "Failed to decode payload", http.StatusBadRequest}.Handle(w)
NewError(err, "Failed to decode payload", http.StatusBadRequest).Handle(w)
return
}
log.Println("PutWrapper: ", data)
if err := data.Put(mux.Vars(r), qParams); err != nil {
AppError{err, "Failed to put data", http.StatusInternalServerError}.Handle(w)
if err := putter(userCtx, mux.Vars(r), qParams, data); err != nil {
NewError(err, "Failed to put data", http.StatusInternalServerError).Handle(w)
return
}
// TODO: writePutResponse
......
......@@ -2,7 +2,6 @@ package db
import (
"context"
"dmpapi/params"
"fmt"
"log"
)
......@@ -60,48 +59,85 @@ type L2DataModel struct {
maybe we need different implementations for all, startDate-only,
endDate-only to increase data access performance */
func (db *DBOracle) GetSensorDataLevel1(userCtx context.Context, pParams map[string]string, qParams params.QueryParams) ([]interface{}, error) {
func (db *DBOracle) GetSensorDataLevel1(userCtx context.Context, pParams map[string]string, qParams map[string]string) ([]interface{}, error) {
return db.getSensorL1Data(
userCtx, L1_QUERY,
pParams["id"],
qParams.StartDate.Format(DATE_FORMAT), qParams.EndDate.Format(DATE_FORMAT),
qParams.Start, qParams.Limit)
qParams["startDate"], qParams["endDate"],
qParams["start"], qParams["limit"],
)
}
func (db *DBOracle) GetSensorDataLevel1Aggregate(userCtx context.Context, pParams map[string]string, qParams params.QueryParams) ([]interface{}, error) {
func (db *DBOracle) GetSensorDataLevel1Aggregate(userCtx context.Context, pParams map[string]string, qParams map[string]string) ([]interface{}, error) {
return db.getSensorL1Data(
userCtx, fmt.Sprintf(L1_AGGREGATE_QUERY, AGGREGATES[pParams["aggregate"]], FUNCTIONS[pParams["func"]]),
pParams["id"],
qParams.StartDate.Format(DATE_FORMAT), qParams.EndDate.Format(DATE_FORMAT),
qParams.Start, qParams.Limit,
qParams["startDate"], qParams["endDate"],
qParams["start"], qParams["limit"],
)
}
// GetSensorByIDLevel2bAggregate returns aggregated level-2b data for the
// sensor matched by its numeric id (pParams["id"]), using the aggregate
// interval and function selected via the route's path variables.
// NOTE(review): aggregate/func/projectid are spliced into the SQL text with
// Sprintf; they appear to be constrained by the routing regexes — verify.
func (db *DBOracle) GetSensorByIDLevel2bAggregate(userCtx context.Context, pParams map[string]string, qParams map[string]string) ([]interface{}, error) {
	aggregateQuery := fmt.Sprintf(
		L2B_AGGREGATE_QUERY,
		AGGREGATES[pParams["aggregate"]],
		pParams["projectid"],
		"SENSOR_ID",
		FUNCTIONS[pParams["func"]],
	)
	return db.getSensorL2Data(
		userCtx, aggregateQuery,
		pParams["id"],
		qParams["startDate"], qParams["endDate"],
		qParams["badMax"], qParams["doubtMax"],
		qParams["start"], qParams["limit"],
	)
}
// GetSensorByNameLevel2bAggregate returns aggregated level-2b data for the
// sensor matched by its name (pParams["name"]), using the aggregate
// interval and function selected via the route's path variables.
// NOTE(review): aggregate/func/projectid are spliced into the SQL text with
// Sprintf; they appear to be constrained by the routing regexes — verify.
func (db *DBOracle) GetSensorByNameLevel2bAggregate(userCtx context.Context, pParams map[string]string, qParams map[string]string) ([]interface{}, error) {
	aggregateQuery := fmt.Sprintf(
		L2B_AGGREGATE_QUERY,
		AGGREGATES[pParams["aggregate"]],
		pParams["projectid"],
		"NAME",
		FUNCTIONS[pParams["func"]],
	)
	return db.getSensorL2Data(
		userCtx, aggregateQuery,
		pParams["name"],
		qParams["startDate"], qParams["endDate"],
		qParams["badMax"], qParams["doubtMax"],
		qParams["start"], qParams["limit"],
	)
}
func (db *DBOracle) GetSensorDataLevel2(userCtx context.Context, pParams map[string]string, qParams params.QueryParams) ([]interface{}, error) {
func (db *DBOracle) GetSensorDataLevel2(userCtx context.Context, pParams map[string]string, qParams map[string]string) ([]interface{}, error) {
return db.getSensorL2Data(
userCtx, L2A_QUERY,
pParams["id"],
qParams.StartDate.Format(DATE_FORMAT), qParams.EndDate.Format(DATE_FORMAT),
qParams.Start, qParams.Limit,
qParams["startDate"], qParams["endDate"],
qParams["start"], qParams["limit"],
)
}
func (db *DBOracle) GetSensorByIDLevel2b(userCtx context.Context, pParams map[string]string, qParams params.QueryParams) ([]interface{}, error) {
func (db *DBOracle) GetSensorByIDLevel2b(userCtx context.Context, pParams map[string]string, qParams map[string]string) ([]interface{}, error) {
return db.getSensorL2Data(
userCtx, fmt.Sprintf(L2B_QUERY, pParams["projectid"], "SENSOR_ID"),
pParams["id"],
qParams.StartDate.Format(DATE_FORMAT), qParams.EndDate.Format(DATE_FORMAT),
qParams.Start, qParams.Limit,
qParams["startDate"], qParams["endDate"],
qParams["start"], qParams["limit"],
)
}
func (db *DBOracle) GetSensorByNameLevel2b(userCtx context.Context, pParams map[string]string, qParams params.QueryParams) ([]interface{}, error) {
func (db *DBOracle) GetSensorByNameLevel2b(userCtx context.Context, pParams map[string]string, qParams map[string]string) ([]interface{}, error) {
return db.getSensorL2Data(
userCtx, fmt.Sprintf(L2B_QUERY, pParams["projectid"], "NAME"),
pParams["name"],
qParams.StartDate.Format(DATE_FORMAT), qParams.EndDate.Format(DATE_FORMAT),
qParams.Start, qParams.Limit,
qParams["startDate"], qParams["endDate"],
qParams["start"], qParams["limit"],
)
}
......@@ -146,5 +182,6 @@ func (db *DBOracle) getSensorL2Data(userCtx context.Context, query string, args
}
out = append(out, rowData)
}
log.Println("reader: ", out)
return out, nil
}
......@@ -2,15 +2,41 @@ package db
import (
"bytes"
"dmpapi/params"
"context"
"encoding/csv"
"encoding/json"
"fmt"
"log"
"net/http"
"net/url"
"reflect"
"strconv"
)
var (
	// FLAGS is the set of quality_flag values accepted by checkLevel2a.
	FLAGS = map[string]bool{
		"OK":       true,
		"DOUBTFUL": true,
		"BAD":      true,
	}
	// CAUSES is the set of quality_cause values accepted by checkLevel2a;
	// the empty string is valid and means "no cause given".
	CAUSES = map[string]bool{
		"":                       true,
		"BATTERY_LOW":            true,
		"BELOW_MINIMUM":          true,
		"ABOVE_MAXIMUM":          true,
		"BELOW_OR_ABOVE_MIN_MAX": true,
		"ISOLATED_SPIKE":         true,
		"DEFECTIVE_SENSOR":       true,
		"LEFT_CENSORED_DATA":     true,
		"RIGHT_CENSORED_DATA":    true,
		"OTHER":                  true,
	}
)
const (
	// LEVEL2A_URL is the base upload endpoint; PutLoggerFlags appends the
	// logger's DEVICE_ID to form the full URL.
	LEVEL2A_URL = "https://logger-worker.intranet.ufz.de/gateway/uploadl2a/device/"
)
func getTags(strct interface{}, name string) []string {
t := reflect.TypeOf(strct)
out := []string{}
......@@ -22,10 +48,8 @@ func getTags(strct interface{}, name string) []string {
return out
}
type Putabler interface {
Keys() []string
Values() [][]string
Put(map[string]string, params.QueryParams) error
type Encodabler interface {
Encode() (*bytes.Buffer, error)
}
type DataFlags []struct {
......@@ -35,55 +59,87 @@ type DataFlags []struct {
Comment string `json:"quality_comment"`
}
func (t DataFlags) Keys() []string {
return getTags(t[0], "json")
}
func (t DataFlags) Values() [][]string {
out := [][]string{}
for _, row := range t {
out = append(
out,
[]string{strconv.Itoa(row.ID), row.Flag, row.Cause, row.Comment},
)
}
return out
}
func (t DataFlags) Put(pParams map[string]string, qParams params.QueryParams) error {
func (t DataFlags) Encode() (*bytes.Buffer, error) {
file := &bytes.Buffer{}
writer := csv.NewWriter(file)
if err := writer.Write(t.Keys()); err != nil {
if err := writer.Write(getTags(t[0], "json")); err != nil {
log.Fatal(err)
return err
return nil, err
}
for _, row := range t.Values() {
if err := writer.Write(row); err != nil {
for _, row := range t {
converted := []string{strconv.Itoa(row.ID), row.Flag, row.Cause, row.Comment}
if err := writer.Write(converted); err != nil {
log.Fatal(err)
return err
return nil, err
}
}
writer.Flush()
log.Println("Put:", file)
return file, nil
}
// PutLoggerFlags prepares an upload of encoded quality-flag data for the
// logger identified by pParams["id"]: it resolves the logger's DEVICE_ID,
// encodes the payload, and builds the upload URL. The actual HTTP upload is
// not implemented yet (the function currently only logs and returns nil).
func (db *DBOracle) PutLoggerFlags(userCtx context.Context, pParams map[string]string, qParams map[string]string, data Encodabler) error {
	loggers, err := db.GetLoggerByID(userCtx, pParams, qParams)
	if err != nil {
		return err
	}
	// Guard the index: GetLoggerByID is expected to error on unknown ids,
	// but do not rely on that here — an empty slice would panic below.
	if len(loggers) == 0 {
		return fmt.Errorf("No logger found for logger id: %s", pParams["id"])
	}
	deviceID := loggers[0].(LoggerModel).DeviceID
	if deviceID == "" {
		return fmt.Errorf("No DEVICE_ID found for logger id: %s", pParams["id"])
	}
	file, err := data.Encode()
	if err != nil {
		return err
	}
	log.Println(file)
	// Previously the parse error was discarded with `_`; surface it instead.
	uploadURL, err := url.Parse(LEVEL2A_URL + deviceID)
	if err != nil {
		return err
	}
	log.Println(uploadURL)
	// TODO: send `file` to uploadURL.
	return nil
}
func NewDataFlags(w http.ResponseWriter, r *http.Request) (Putabler, error) {
func DecodeDataFlags(w http.ResponseWriter, r *http.Request) (Encodabler, error) {
// TODO: add consistency checks
r.Body = http.MaxBytesReader(w, r.Body, 10<<20)
dec := json.NewDecoder(r.Body)
dec.DisallowUnknownFields()
target := DataFlags{}
if err := dec.Decode(&target); err != nil {
data := DataFlags{}
if err := dec.Decode(&data); err != nil {
return nil, err
}
if err := checkLevel2a(data); err != nil {
return nil, err
}
log.Println("decodeBody:", target)
// TODO: some sort of safety nets
// (empty body, more than PAGE_LIMIT lines)
// log.Println("decodeBody: ", length, PutData)
return target, nil
return data, nil
}
// checkLevel2a validates a decoded level-2a flag payload row by row:
// quality_flag must be in FLAGS, quality_cause in CAUSES, a non-OK flag
// requires a cause, and cause OTHER requires a free-text comment.
// The first violation is returned with its (zero-based) line index.
func checkLevel2a(data DataFlags) error {
	for i, row := range data {
		if _, ok := FLAGS[row.Flag]; !ok {
			return fmt.Errorf("line %v: invalid quality_flag '%s'", i, row.Flag)
		}
		if _, ok := CAUSES[row.Cause]; !ok {
			return fmt.Errorf("line %v: invalid quality_cause: '%s'", i, row.Cause)
		}
		// Fix: both messages below used a %v verb with no argument
		// (vet: missing argument), printing "%!v(MISSING)".
		if (row.Flag != "OK") && (row.Cause == "") {
			return fmt.Errorf("line %v: quality_cause needed if quality_flag is not 'OK'", i)
		}
		// Fix: previously any row with cause OTHER was rejected even when a
		// comment was present; per the message, only a missing comment is
		// an error.
		if row.Cause == "OTHER" && row.Comment == "" {
			return fmt.Errorf("line %v: quality_comment needed if quality_cause is set to 'OTHER'", i)
		}
	}
	return nil
}
......@@ -2,33 +2,38 @@ package db
import (
"context"
"dmpapi/params"
"fmt"
"log"
"strings"
)
const (
LOGGER_QUERY = "SELECT l.LOGGER_ID, l.NAME, l.LABEL, l.RESPONSIBLE_PERSON, l.UNTERSUCHUNGSGEBIET_ID FROM LOGGER.UNI_EXP_LOGGER l"
LOGGER_QUERY = "SELECT l.LOGGER_ID, l.DEVICE_ID, l.NAME, l.LABEL, l.RESPONSIBLE_PERSON, l.UNTERSUCHUNGSGEBIET_ID FROM LOGGER.UNI_EXP_LOGGER l"
LOGGER_WHERE_QUERY = "WHERE l.LOGGER_ID = :1"
LOGGER_PAGE_QUERY = "ORDER BY l.LOGGER_ID OFFSET :1 ROWS FETCH NEXT :2 ROWS ONLY"
)
type LoggerModel struct {
ID int `db:"LOGGER_ID" json:"loggerId"`
Name string `db:"NAME" json:"loggerName"`
Label string `db:"LABEL" json:"loggerLabel"`
Person string `db:"RESPONSIBLE_PERSON" json:"responsiblePerson"`
Station int `db:"UNTERSUCHUNGSGEBIET_ID" json:"stationId"`
ID int `db:"LOGGER_ID" json:"loggerId"`
DeviceID string `db:"DEVICE_ID" json:"deviceID"`
Name string `db:"NAME" json:"loggerName"`
Label string `db:"LABEL" json:"loggerLabel"`
Person string `db:"RESPONSIBLE_PERSON" json:"responsiblePerson"`
Station int `db:"UNTERSUCHUNGSGEBIET_ID" json:"stationId"`
}
func (db *DBOracle) GetLoggersAll(userCtx context.Context, pParams map[string]string, qParams params.QueryParams) ([]interface{}, error) {
func (db *DBOracle) GetLoggersAll(userCtx context.Context, pParams map[string]string, qParams map[string]string) ([]interface{}, error) {
query := strings.Join([]string{LOGGER_QUERY, LOGGER_PAGE_QUERY}, " ")
return db.getLoggers(userCtx, query, qParams.QueryPaging.Start, qParams.QueryPaging.Limit)
return db.getLoggers(userCtx, query, qParams["start"], qParams["limit"])
}
func (db *DBOracle) GetLoggerByID(userCtx context.Context, pParams map[string]string, qParams params.QueryParams) ([]interface{}, error) {
func (db *DBOracle) GetLoggerByID(userCtx context.Context, pParams map[string]string, qParams map[string]string) ([]interface{}, error) {
query := strings.Join([]string{LOGGER_QUERY, LOGGER_WHERE_QUERY}, " ")
return db.getLoggers(userCtx, query, pParams["id"])
loggers, err := db.getLoggers(userCtx, query, pParams["id"])
if (err == nil) && (len(loggers) == 0) {
return nil, fmt.Errorf("Invalid logger id: %v", pParams["id"])
}
return loggers, err
}
func (db *DBOracle) getLoggers(userCtx context.Context, query string, args ...interface{}) ([]interface{}, error) {
......
......@@ -7,14 +7,13 @@ const (
L1_QUERY = `
WITH tmp AS (
SELECT
v.LEVEL1_VALUE_ID AS VALUE_ID,
v.LEVEL1_VALUE_ID AS VALUE_ID,
TO_CHAR(r.TIMESTAMP_MEASUREMENT, 'YYYY-MM-DD HH24:MI:SS') AS TIMESTAMP_MEASUREMENT,
TO_NUMBER_SAFE(v.VALUE) AS VALUE_MEASUREMENT,
r.LOGGER_ID AS LOGGER_ID,
v.SENSOR_ID AS SENSOR_ID,
s.NAME AS SENSOR_NAME,
RANK()
OVER(PARTITION BY r.TIMESTAMP_MEASUREMENT ORDER BY r.LEVEL1_RECORD_ID DESC) DEST_RANK
TO_NUMBER_SAFE(v.VALUE) AS VALUE_MEASUREMENT,
r.LOGGER_ID AS LOGGER_ID,
v.SENSOR_ID AS SENSOR_ID,
s.NAME AS SENSOR_NAME,
RANK() OVER(PARTITION BY r.TIMESTAMP_MEASUREMENT ORDER BY r.LEVEL1_RECORD_ID DESC) DEST_RANK
FROM
LOGGER.UNI_EXP_LEVEL1_VALUE v
JOIN LOGGER.UNI_EXP_LEVEL1_RECORD r ON v.LEVEL1_RECORD_ID = r.LEVEL1_RECORD_ID
......@@ -29,19 +28,18 @@ const (
ORDER BY TIMESTAMP_MEASUREMENT, LOGGER_ID, SENSOR_ID
OFFSET :4 ROWS FETCH NEXT :5 ROWS ONLY`
// TODO: Ask Martin if we can have duplicated value-date pairs here as well
L2A_QUERY = `
WITH tmp AS (
SELECT
v.LEVEL1_VALUE_ID AS VALUE_ID,
v.LEVEL1_VALUE_ID AS VALUE_ID,
TO_CHAR(r.TIMESTAMP_MEASUREMENT, 'YYYY-MM-DD HH24:MI:SS') AS TIMESTAMP_MEASUREMENT,
TO_NUMBER_SAFE(v.VALUE) AS VALUE_MEASUREMENT,
r.LOGGER_ID AS LOGGER_ID,
v.SENSOR_ID AS SENSOR_ID,
s.NAME AS SENSOR_NAME,
f.QUALITY_FLAG AS QUALITY_FLAG,
f.QUALITY_CAUSE AS QUALITY_CAUSE,
f.QUALITY_COMMENT AS QUALITY_COMMENT
TO_NUMBER_SAFE(v.VALUE) AS VALUE_MEASUREMENT,
r.LOGGER_ID AS LOGGER_ID,
v.SENSOR_ID AS SENSOR_ID,
s.NAME AS SENSOR_NAME,
f.QUALITY_FLAG AS QUALITY_FLAG,
f.QUALITY_CAUSE AS QUALITY_CAUSE,
f.QUALITY_COMMENT AS QUALITY_COMMENT
FROM
LOGGER.UNI_EXP_LEVEL1_VALUE v
JOIN LOGGER.UNI_EXP_LEVEL1_RECORD r ON v.LEVEL1_RECORD_ID = r.LEVEL1_RECORD_ID
......@@ -57,41 +55,15 @@ const (
ORDER BY TIMESTAMP_MEASUREMENT, LOGGER_ID, SENSOR_ID
OFFSET :4 ROWS FETCH NEXT :5 ROWS ONLY`
L2B_QUERY = `
WITH tmp AS (
SELECT
TO_CHAR(d.TIMESTAMP, 'YYYY-MM-DD HH24:MI:SS') AS TIMESTAMP_MEASUREMENT,
d.LOGGER_ID AS LOGGER_ID,
TO_NUMBER_SAFE(d.VALUE) AS VALUE_MEASUREMENT,
s.SENSOR_ID AS SENSOR_ID,
s.NAME AS SENSOR_NAME,
d.QUALITY_FLAG AS QUALITY_FLAG,
d.QUALITY_CAUSE AS QUALITY_CAUSE,
d.QUALITY_COMMENT AS QUALITY_COMMENT
FROM
LOGGER.EXP_DP%s_L2B_DATA d
join LOGGER.UNI_EXP_SENSOR s
on d.NAME = s.NAME
WHERE
s.%s = :1
AND d.TIMESTAMP >= LOGGER.DMP_DATE.PARSE(:2)
AND d.TIMESTAMP < LOGGER.DMP_DATE.PARSE(:3)
)
SELECT * FROM tmp
WHERE VALUE_MEASUREMENT IS NOT NULL
ORDER BY TIMESTAMP_MEASUREMENT, LOGGER_ID, SENSOR_ID
OFFSET :4 ROWS FETCH NEXT :5 ROWS ONLY`
L1_AGGREGATE_QUERY = `
WITH tmp AS (
SELECT
TO_CHAR(TRUNC(r.TIMESTAMP_MEASUREMENT, '%s'), 'YYYY-MM-DD HH24:MI:SS') AS TIMESTAMP_MEASUREMENT,
TO_NUMBER_SAFE(v.VALUE) AS VALUE_MEASUREMENT,
r.LOGGER_ID AS LOGGER_ID,
v.SENSOR_ID AS SENSOR_ID,
s.NAME AS SENSOR_NAME,
RANK()
OVER(PARTITION BY r.TIMESTAMP_MEASUREMENT ORDER BY r.LEVEL1_RECORD_ID DESC) DEST_RANK
TO_NUMBER_SAFE(v.VALUE) AS VALUE_MEASUREMENT,
r.LOGGER_ID AS LOGGER_ID,
v.SENSOR_ID AS SENSOR_ID,
s.NAME AS SENSOR_NAME,
RANK() OVER(PARTITION BY r.TIMESTAMP_MEASUREMENT ORDER BY r.LEVEL1_RECORD_ID DESC) DEST_RANK
FROM
LOGGER.UNI_EXP_LEVEL1_VALUE v
JOIN LOGGER.UNI_EXP_LEVEL1_RECORD r ON v.LEVEL1_RECORD_ID = r.LEVEL1_RECORD_ID
......@@ -112,4 +84,71 @@ const (
GROUP BY TIMESTAMP_MEASUREMENT, LOGGER_ID, SENSOR_ID, SENSOR_NAME
ORDER BY TIMESTAMP_MEASUREMENT, LOGGER_ID, SENSOR_ID, SENSOR_NAME
OFFSET :4 ROWS FETCH NEXT :5 ROWS ONLY`
L2B_QUERY = `
WITH tmp AS (
SELECT
TO_CHAR(d.TIMESTAMP, 'YYYY-MM-DD HH24:MI:SS') AS TIMESTAMP_MEASUREMENT,
d.LOGGER_ID AS LOGGER_ID,
TO_NUMBER_SAFE(d.VALUE) AS VALUE_MEASUREMENT,
s.SENSOR_ID AS SENSOR_ID,
s.NAME AS SENSOR_NAME,
d.QUALITY_FLAG AS QUALITY_FLAG,
d.QUALITY_CAUSE AS QUALITY_CAUSE,
d.QUALITY_COMMENT AS QUALITY_COMMENT
FROM
LOGGER.EXP_DP%s_L2B_DATA d
join LOGGER.UNI_EXP_SENSOR s
on d.NAME = s.NAME
WHERE
s.%s = :1
AND d.TIMESTAMP >= LOGGER.DMP_DATE.PARSE(:2)
AND d.TIMESTAMP < LOGGER.DMP_DATE.PARSE(:3)
)
SELECT * FROM tmp
WHERE VALUE_MEASUREMENT IS NOT NULL
ORDER BY TIMESTAMP_MEASUREMENT, LOGGER_ID, SENSOR_ID, SENSOR_NAME
OFFSET :4 ROWS FETCH NEXT :5 ROWS ONLY`
L2B_AGGREGATE_QUERY = `
WITH data AS (
SELECT
TO_CHAR(TRUNC(d.TIMESTAMP, '%[1]s'), 'YYYY-MM-DD HH24:MI:SS') AS TIMESTAMP_MEASUREMENT,
d.LOGGER_ID AS LOGGER_ID,
TO_NUMBER_SAFE(d.VALUE) AS VALUE_MEASUREMENT,
s.SENSOR_ID AS SENSOR_ID,
s.NAME AS NAME,
d.QUALITY_FLAG AS QUALITY_FLAG,
d.QUALITY_CAUSE AS QUALITY_CAUSE,
d.QUALITY_COMMENT AS QUALITY_COMMENT
FROM
LOGGER.EXP_DP%[2]s_L2B_DATA d
join LOGGER.UNI_EXP_SENSOR s on d.NAME = s.NAME
WHERE
s.%[3]s = :1
AND d.TIMESTAMP >= LOGGER.DMP_DATE.PARSE(:2)
AND d.TIMESTAMP < LOGGER.DMP_DATE.PARSE(:3)
),
aggregated AS (
SELECT
TIMESTAMP_MEASUREMENT AS TIMESTAMP_MEASUREMENT,
LOGGER_ID AS LOGGER_ID,
%[4]s(CASE WHEN QUALITY_FLAG in ('BAD', 'DOUBTFUL') THEN NULL ELSE VALUE_MEASUREMENT END) AS VALUE_MEASUREMENT,
MAX(SENSOR_ID) AS SENSOR_ID,
NAME AS SENSOR_NAME,
CASE
WHEN AVG(CASE WHEN QUALITY_FLAG = 'BAD' then 1 else 0 end) > :4 THEN 'BAD'
WHEN AVG(CASE WHEN QUALITY_FLAG in ('BAD', 'DOUBTFUL') then 1 else 0 end) > :5 THEN 'DOUBTFUL'
ELSE 'OK'
END AS QUALITY_FLAG,
NULL AS QUALITY_CAUSE,
NULL AS QUALITY_COMMENT
FROM data
GROUP BY TIMESTAMP_MEASUREMENT, LOGGER_ID, NAME
ORDER BY TIMESTAMP_MEASUREMENT, LOGGER_ID, NAME
)
SELECT * FROM aggregated
WHERE VALUE_MEASUREMENT IS NOT NULL
OFFSET :6 ROWS FETCH NEXT :7 ROWS ONLY
`
)
......@@ -3,7 +3,6 @@ package db
import (
"context"
"database/sql/driver"
"dmpapi/params"
"fmt"
"log"
"strings"
......@@ -94,24 +93,24 @@ type SensorModel struct {
locationModel
}
func (db *DBOracle) GetSensorsAll(userCtx context.Context, pParams map[string]string, qParams params.QueryParams) ([]interface{}, error) {
func (db *DBOracle) GetSensorsAll(userCtx context.Context, pParams map[string]string, qParams map[string]string) ([]interface{}, error) {
query := strings.Join([]string{SENSOR_QUERY, SENSOR_PAGE_QUERY}, " ")
return db.getSensors(userCtx, query, qParams.Start, qParams.Limit)
return db.getSensors(userCtx, query, qParams["start"], qParams["limit"])
}
func (db *DBOracle) GetSensorsByID(userCtx context.Context, pParams map[string]string, qParams params.QueryParams) ([]interface{}, error) {
func (db *DBOracle) GetSensorsByID(userCtx context.Context, pParams map[string]string, qParams map[string]string) ([]interface{}, error) {
query := strings.Join([]string{SENSOR_QUERY, SENSOR_WHERE_SENSORID}, " ")
return db.getSensors(userCtx, query, pParams["id"])
}
func (db *DBOracle) GetSensorsByName(userCtx context.Context, pParams map[string]string, qParams params.QueryParams) ([]interface{}, error) {
func (db *DBOracle) GetSensorsByName(userCtx context.Context, pParams map[string]string, qParams map[string]string) ([]interface{}, error) {
query := strings.Join([]string{SENSOR_QUERY, SENSOR_WHERE_NAME}, " ")
return db.getSensors(userCtx, query, pParams["name"])
}
func (db *DBOracle) GetSensorsByLoggerID(userCtx context.Context, pParams map[string]string, qParams params.QueryParams) ([]interface{}, error) {
func (db *DBOracle) GetSensorsByLoggerID(userCtx context.Context, pParams map[string]string, qParams map[string]string) ([]interface{}, error) {
query := strings.Join([]string{SENSOR_QUERY, SENSOR_WHERE_LOGGERID, SENSOR_PAGE_QUERY}, " ")
return db.getSensors(userCtx, query, pParams["id"], qParams.QueryPaging.Start, qParams.QueryPaging.Limit)
return db.getSensors(userCtx, query, pParams["id"], qParams["start"], qParams["limit"])
}
func (db *DBOracle) getSensors(userCtx context.Context, query string, args ...interface{}) ([]interface{}, error) {
......
package main
import (
d "dmpapi/db"
"encoding/json"
// "fmt"
"net/http/httptest"
"testing"
)
// LoggerResponse is the JSON envelope the logger endpoints return:
// the shared BaseResponse fields plus the logger rows under "data".
type LoggerResponse struct {
	BaseResponse
	Data []d.LoggerModel `json:"data"`
}
// readLoggerResponseBody decodes the recorded response body into a
// LoggerResponse, failing the test immediately on malformed JSON.
func readLoggerResponseBody(t *testing.T, rr *httptest.ResponseRecorder) LoggerResponse {
	res := LoggerResponse{}
	// rr.Body.Bytes() already yields []byte; the former []byte(...)
	// conversion was a redundant copy.
	err := json.Unmarshal(rr.Body.Bytes(), &res)
	if err != nil {
		t.Fatal("json.Unmarshal error: ", err)
	}
	return res
}
// equalLoggerModels reports test errors when got differs from expected,
// first by length and then element by element.
func equalLoggerModels(t *testing.T, expected []d.LoggerModel, got []d.LoggerModel) {
	if len(expected) != len(got) {
		// Fix: arguments were swapped relative to the "got %v, expected %v"
		// labels.
		t.Errorf("handler return wrong number of data parameters: got %v, expected %v",
			len(got), len(expected))
		// Fix: must stop here — indexing got[i] below panics when got is
		// shorter than expected.
		return
	}
	for i := range expected {
		if expected[i] != got[i] {
			// Fix: arguments were swapped relative to the labels here too.
			t.Errorf("handler return wrong data parameter: got %v, expected %v",
				got[i], expected[i])
		}
	}
}
// func TestLoggerHandlers(t *testing.T) {
// testCases := []struct {
// route string
// expected []d.LoggerModel
// }{
// {fmt.Sprintf("/v1/projects/1/loggers/%d", logger1.ID), []d.LoggerModel{logger1}},
// {fmt.Sprintf("/v1/projects/1/loggers/%s", logger2.Label), []d.LoggerModel{logger2}},
// {"/v1/projects/1/loggers", loggers},
// }
// app := &App{&DBMock{}}
// router := InitRouter(app)
// for _, tc := range testCases {
// t.Run(tc.route, func(t *testing.T) {
// req := prepGetRequest(t, tc.route)
// rr := httptest.NewRecorder()
// router.ServeHTTP(rr, req)
// res := readLoggerResponseBody(t, rr)
// equalLoggerModels(t, tc.expected, res.Data)
// })
// }
// }
......@@ -15,8 +15,6 @@ func InitRouter(app *App) *mux.Router {
router.Use(urlPrepper)
router.Use(headerContentSetter)
router.Handle("/loggers/{id:[0-9]+}/flags", PutWrapper(app, db.NewDataFlags)).Methods("PUT")
// routes
router.Handle("/loggers", app.GetWrapper(app.GetLoggersAll)).Methods("GET")
router.Handle("/loggers/{id:[0-9]+}", app.GetWrapper(app.GetLoggerByID)).Methods("GET")
......@@ -29,41 +27,22 @@ func InitRouter(app *App) *mux.Router {
router.Handle("/sensors/{name:\\w+}", app.GetWrapper(app.GetSensorsByName)).Methods("GET")
router.Handle("/sensors", app.GetWrapper(app.GetSensorsAll)).Methods("GET")
// register multiple times to cover the different parameter combinations
level1Handler := app.GetWrapper(app.GetSensorDataLevel1)
level1Route := "/sensors/{id:[0-9]+}/data/level1"
router.Handle(level1Route, level1Handler).Methods("GET").Queries("startDate", "")
router.Handle(level1Route, level1Handler).Methods("GET").Queries("endDate", "")
router.Handle(level1Route, level1Handler).Methods("GET").Queries("startDate", "", "endDate", "")
router.Handle(level1Route, level1Handler).Methods("GET")
router.Handle("/sensors/{id:[0-9]+}/data/level1", app.GetWrapper(app.GetSensorDataLevel1)).Methods("GEt")
level1AggregateHandler := app.GetWrapper(app.GetSensorDataLevel1Aggregate)
level1AggregateRoute := "/sensors/{id:[0-9]+}/data/level1/{aggregate:(?:hourly|daily|monthly|yearly)}/{func:(?:sum|mean)}"
router.Handle(level1AggregateRoute, level1AggregateHandler).Methods("GET").Queries("startDate", "")
router.Handle(level1AggregateRoute, level1AggregateHandler).Methods("GET").Queries("endDate", "")
router.Handle(level1AggregateRoute, level1AggregateHandler).Methods("GET").Queries("startDate", "", "endDate", "")
router.Handle(level1AggregateRoute, level1AggregateHandler).Methods("GET")
router.Handle(level1AggregateRoute, app.GetWrapper(app.GetSensorDataLevel1Aggregate)).Methods("GET")
router.Handle("/sensors/{id:[0-9]+}/data/level2", app.GetWrapper(app.GetSensorDataLevel2)).Methods("GET")
router.Handle("/sensors/{id:[0-9]+}/data/level2b", app.GetWrapper(app.GetSensorByIDLevel2b)).Methods("GET")
router.Handle("/sensors/{name:\\w+}/data/level2b", app.GetWrapper(app.GetSensorByNameLevel2b)).Methods("GET")
level2aHandler := app.GetWrapper(app.GetSensorDataLevel2)
level2aRoute := "/sensors/{id:[0-9]+}/data/level2"
router.Handle(level2aRoute, level2aHandler).Methods("GET").Queries("startDate", "")
router.Handle(level2aRoute, level2aHandler).Methods("GET").Queries("endDate", "")
router.Handle(level2aRoute, level2aHandler).Methods("GET").Queries("startDate", "", "endDate", "")
router.Handle(level2aRoute, level2aHandler).Methods("GET")
level2bByIDAggregateRoute := "/sensors/{id:[0-9]+}/data/level2b/{aggregate:(?:hourly|daily|monthly|yearly)}/{func:(?:sum|mean)}"
router.Handle(level2bByIDAggregateRoute, app.GetWrapper(app.GetSensorByIDLevel2bAggregate)).Methods("GET")
level2bByIDHandler := app.GetWrapper(app.GetSensorByIDLevel2b)
level2bByIDRoute := "/sensors/{id:[0-9]+}/data/level2b"
router.Handle(level2bByIDRoute, level2bByIDHandler).Methods("GET").Queries("startDate", "")
router.Handle(level2bByIDRoute, level2bByIDHandler).Methods("GET").Queries("endDate", "")
router.Handle(level2bByIDRoute, level2bByIDHandler).Methods("GET").Queries("startDate", "", "endDate", "")
router.Handle(level2bByIDRoute, level2bByIDHandler).Methods("GET")
level2bByNameAggregateRoute := "/sensors/{name:\\w+}/data/level2b/{aggregate:(?:hourly|daily|monthly|yearly)}/{func:(?:sum|mean)}"
router.Handle(level2bByNameAggregateRoute, app.GetWrapper(app.GetSensorByNameLevel2bAggregate)).Methods("GET")
level2bByNameHandler := app.GetWrapper(app.GetSensorByNameLevel2b)
level2bByNameRoute := "/sensors/{name:\\w+}/data/level2b"
router.Handle(level2bByNameRoute, level2bByNameHandler).Methods("GET").Queries("startDate", "")
router.Handle(level2bByNameRoute, level2bByNameHandler).Methods("GET").Queries("endDate", "")
router.Handle(level2bByNameRoute, level2bByNameHandler).Methods("GET").Queries("startDate", "", "endDate", "")
router.Handle(level2bByNameRoute, level2bByNameHandler).Methods("GET")
router.Handle("/loggers/{id:[0-9]+}/flags", PutWrapper(app, db.DecodeDataFlags, app.PutLoggerFlags)).Methods("PUT")
return router
}
......
package main
import (
"context"
d "dmpapi/db"
"net/http"
"strconv"
"testing"
)
// Shared test fixtures: five loggers (1-3 on station 1, 4-5 on station 2)
// and five sensors attached to loggers 1 and 2.
var (
	logger1 = d.LoggerModel{ID: 1, Name: "Logger1", Label: "L1", Person: "Me", Station: 1}
	logger2 = d.LoggerModel{ID: 2, Name: "Logger2", Label: "L2", Person: "Me", Station: 1}
	logger3 = d.LoggerModel{ID: 3, Name: "Logger3", Label: "L3", Person: "Me", Station: 1}
	logger4 = d.LoggerModel{ID: 4, Name: "Logger4", Label: "L4", Person: "Me", Station: 2}
	logger5 = d.LoggerModel{ID: 5, Name: "Logger5", Label: "L5", Person: "Me", Station: 2}
	loggers = []d.LoggerModel{logger1, logger2, logger3, logger4, logger5}

	sensor1 = d.TheSensorModel{d.SensorModel{ID: 1, Name: "SoilMoistureSensor", Compartment: "Soil", Variable: "SoilMoisture", Unit: "mm", Interval: 3600}, d.LocationModel{Height: 154.0, Lon: 24.275, Lat: 36.115}, d.ConfigurationModel{LoggerID: 1}}
	sensor2 = d.TheSensorModel{d.SensorModel{ID: 2, Name: "SoilTemperatureSensor", Compartment: "Soil", Variable: "SoilTemperatur", Unit: "degC", Interval: 3600}, d.LocationModel{Height: 154.0, Lon: 24.275, Lat: 36.225}, d.ConfigurationModel{LoggerID: 1}}
	sensor3 = d.TheSensorModel{d.SensorModel{ID: 3, Name: "SoilMoistureSensor", Compartment: "Soil", Variable: "SoilMoisture", Unit: "mm", Interval: 3600}, d.LocationModel{Height: 154.0, Lon: 24.272, Lat: 36.337}, d.ConfigurationModel{LoggerID: 2}}
	sensor4 = d.TheSensorModel{d.SensorModel{ID: 4, Name: "SoilTemperatureSensor", Compartment: "Soil", Variable: "SoilTemperatur", Unit: "degC", Interval: 3600}, d.LocationModel{Height: 154.0, Lon: 24.272, Lat: 36.337}, d.ConfigurationModel{LoggerID: 2}}
	sensor5 = d.TheSensorModel{d.SensorModel{ID: 5, Name: "AirTemperatureSensor", Compartment: "Air", Variable: "AirTemperature", Unit: "degC", Interval: 3600}, d.LocationModel{Height: 157.0, Lon: 24.276, Lat: 36.556}, d.ConfigurationModel{LoggerID: 1}}
	sensors = []d.TheSensorModel{sensor1, sensor2, sensor3, sensor4, sensor5}
)
// BaseResponse is the envelope shared by the API responses in these tests:
// pagination links (nil when absent) and an error string.
type BaseResponse struct {
	Pagination *PageURLs `json:"pagination"`
	Error      string    `json:"error"`
}
func prepGetRequest(t *testing.T, url string) *http.Request {
req, err := http.NewRequest("GET", url, nil)
if err != nil {
t.Fatal(err)
}
return req
}
// DBMock is an in-memory stand-in for the database layer in handler tests.
type DBMock struct{}

// Authenticate accepts any credentials and hands back a background context.
func (m *DBMock) Authenticate(username string, password string) (context.Context, error) {
	ctx := context.Background()
	return ctx, nil
}
// GetLoggersAll returns the full logger fixture set; the paging arguments
// are ignored by this mock.
func (m *DBMock) GetLoggersAll(userCtx context.Context, start uint64, limit uint64) ([]d.LoggerModel, error) {
	return loggers, nil
}
// GetLoggerByLabel returns the fixture loggers whose Label matches label.
func (m *DBMock) GetLoggerByLabel(userCtx context.Context, label string) ([]d.LoggerModel, error) {
	matches := []d.LoggerModel{}
	for _, candidate := range loggers {
		if candidate.Label == label {
			matches = append(matches, candidate)
		}
	}
	return matches, nil
}
// GetLoggerByID returns the fixture loggers whose numeric ID matches id
// (passed as a string, as it comes from the URL path).
func (m *DBMock) GetLoggerByID(userCtx context.Context, id string) ([]d.LoggerModel, error) {
	wanted, _ := strconv.Atoi(id)
	matches := []d.LoggerModel{}
	for _, candidate := range loggers {
		if candidate.ID == wanted {
			matches = append(matches, candidate)
		}
	}
	return matches, nil
}
// GetSensorsAll returns the sensor fixtures in the half-open index window
// [start, start+limit).
func (m *DBMock) GetSensorsAll(userCtx context.Context, start uint64, limit uint64) ([]d.TheSensorModel, error) {
	first := int(start)
	last := first + int(limit)
	page := []d.TheSensorModel{}
	for idx, sensor := range sensors {
		if idx >= first && idx < last {
			page = append(page, sensor)
		}
	}
	return page, nil
}
// GetSensorByID returns the fixture sensors whose numeric ID matches id
// (passed as a string, as it comes from the URL path).
func (m *DBMock) GetSensorByID(userCtx context.Context, id string) ([]d.TheSensorModel, error) {
	wanted, _ := strconv.Atoi(id)
	matches := []d.TheSensorModel{}
	for _, sensor := range sensors {
		if sensor.ID == wanted {
			matches = append(matches, sensor)
		}
	}
	return matches, nil
}
// GetSensorsByName returns the fixture sensors whose Name matches name.
func (m *DBMock) GetSensorsByName(userCtx context.Context, name string) ([]d.TheSensorModel, error) {
	matches := []d.TheSensorModel{}
	for _, sensor := range sensors {
		if sensor.Name == name {
			matches = append(matches, sensor)
		}
	}
	return matches, nil
}
// GetSensorsByLoggerID returns the [start, start+limit) index window of
// fixture sensors attached to the given logger id.
func (m *DBMock) GetSensorsByLoggerID(userCtx context.Context, start uint64, limit uint64, loggerID string) ([]d.TheSensorModel, error) {
	wanted, _ := strconv.Atoi(loggerID)
	first := int(start)
	last := first + int(limit)
	page := []d.TheSensorModel{}
	for idx, sensor := range sensors {
		if idx >= first && idx < last && sensor.LoggerID == wanted {
			page = append(page, sensor)
		}
	}
	return page, nil
}
package main
import (
"github.com/gorilla/schema"
"log"
"net/url"
"reflect"
"time"
)
const (
	// PAGE_LIMIT caps the page size; initPaging falls back to it for zero
	// or oversized limits, and NextURL uses it to detect a partial page.
	PAGE_LIMIT = 100000
	// DATE_LAYOUT is the reference layout for plain calendar dates.
	DATE_LAYOUT = "2006-01-02"
)
var (
	// MIN_DATE/MAX_DATE bound the query date range; initDates clamps
	// out-of-range values to them.
	MIN_DATE, _ = time.Parse(DATE_LAYOUT, "1900-01-01")
	MAX_DATE, _ = time.Parse(DATE_LAYOUT, "2100-12-31")
	// DATE_LAYOUTS lists the accepted input formats, tried in order by
	// timeConverter.
	DATE_LAYOUTS = []string{
		// TODO:
		// provide feedback when date parsing fails
		// currently an empty result set is provided
		// -> not too helpfull
		"2006-01-02T15:04:05.999999999Z",
		"2006-01-02T15:04:05.999999999",
		DATE_LAYOUT,
	}
)
type PageURLs struct {
Last string `json:"last"`
Next string `json:"next"`
Self string `json:"self"`
}
// type QueryDataControl struct {
// Strict bool `schema:strict`
// }
// QueryDates holds the optional date-range query parameters.
type QueryDates struct {
	StartDate time.Time `schema:"startDate"`
	EndDate   time.Time `schema:"endDate"`
}
// QueryPaging holds the paging query parameters (offset and page size).
type QueryPaging struct {
	Start uint64 `schema:"start"`
	Limit uint64 `schema:"limit"`
}
// QueryParams aggregates all supported query parameters via embedding.
type QueryParams struct {
	// QueryDataControl
	QueryPaging
	QueryDates
}
// timeConverter adapts date parsing for the schema decoder: each layout
// in DATE_LAYOUTS is tried in order, and an invalid (zero) reflect.Value
// signals a conversion failure to the decoder.
var timeConverter = func(value string) reflect.Value {
	for _, layout := range DATE_LAYOUTS {
		if parsed, err := time.Parse(layout, value); err == nil {
			return reflect.ValueOf(parsed)
		}
	}
	// this is the same as the private const invalidType
	return reflect.Value{}
}
// InitQueryParams decodes raw URL query values into a QueryParams and
// then normalizes paging and date bounds to their defaults where needed.
func InitQueryParams(qParams map[string][]string) (QueryParams, error) {
	decoder := schema.NewDecoder()
	decoder.RegisterConverter(time.Time{}, timeConverter)

	params := QueryParams{}
	if err := decoder.Decode(&params, qParams); err != nil {
		return params, err
	}
	return params.initPaging().initDates(), nil
}
// initDates clamps the date range into [MIN_DATE, MAX_DATE], resetting
// each bound independently when it lies outside that range.
func (p QueryParams) initDates() QueryParams {
	// BUG FIX: StartDate used to be reset whenever the END date exceeded
	// MAX_DATE, while a StartDate past MAX_DATE slipped through unchanged.
	// Clamp StartDate on its own bounds, mirroring the EndDate check.
	if p.StartDate.Before(MIN_DATE) || p.StartDate.After(MAX_DATE) {
		p.StartDate = MIN_DATE
	}
	if p.EndDate.After(MAX_DATE) || p.EndDate.Before(MIN_DATE) {
		p.EndDate = MAX_DATE
	}
	return p
}
// initPaging forces the page size into (0, PAGE_LIMIT]; zero or
// oversized limits fall back to the maximum.
func (p QueryParams) initPaging() QueryParams {
	if p.Limit == 0 || p.Limit > PAGE_LIMIT {
		p.Limit = PAGE_LIMIT
	}
	return p
}
// EncodePaging builds the next/last/self links for a page of the given
// length.
// NOTE(review): NextURL, LastURL and ToURL all mutate the SAME *url.URL
// (each rewrites u.RawQuery), so the field order below — Next, then Last,
// then Self — is load-bearing: each .String() captures the query before
// the following call overwrites it. Do not reorder.
func (p *QueryPaging) EncodePaging(u *url.URL, length uint64) *PageURLs {
	return &PageURLs{
		Next: p.NextURL(u, length).String(),
		Last: p.LastURL(u).String(),
		Self: p.ToURL(u).String(),
	}
}
// ToURL writes p's paging fields into u's query string (replacing any
// existing query) and returns the same, mutated URL.
// NOTE(review): an encoding failure aborts the whole process via
// log.Fatal — harsh for a request-path helper; consider returning the
// error instead.
func (p *QueryPaging) ToURL(u *url.URL) *url.URL {
	query := url.Values{}
	pEncoder := schema.NewEncoder()
	err := pEncoder.Encode(p, query)
	if err != nil {
		log.Fatal(err)
	}
	u.RawQuery = query.Encode()
	return u
}
// NextURL returns the link to the following page, or an empty URL when
// the current page was not full (no further data can exist).
func (p *QueryPaging) NextURL(u *url.URL, length uint64) *url.URL {
	// BUG FIX: the page length was compared against the global PAGE_LIMIT
	// instead of the requested p.Limit, so with any smaller limit a full
	// page never produced a next link. Compare against p.Limit and
	// advance by it (length == p.Limit whenever this branch is passed).
	if length < p.Limit {
		return &url.URL{}
	}
	next := &QueryPaging{Start: p.Start + p.Limit, Limit: p.Limit}
	return next.ToURL(u)
}
// LastURL returns the link to the preceding page, or an empty URL when
// the current page is the first one (Start < Limit) — a partial first
// page is therefore never linked from here.
func (p *QueryPaging) LastURL(u *url.URL) *url.URL {
	if p.Start < p.Limit {
		return &url.URL{}
	}
	previous := QueryPaging{Start: p.Start - p.Limit, Limit: p.Limit}
	return previous.ToURL(u)
}
package params
import (
"github.com/gorilla/schema"
"log"
"fmt"
"net/url"
"reflect"
"strconv"
"time"
)
const (
	// PAGE_LIMIT is the default and maximum page size for paged queries.
	PAGE_LIMIT = 100000
	// DATE_LAYOUT is the canonical layout for query dates.
	// NOTE: a stale duplicate declaration with the old day-only layout
	// ("2006-01-02") was left behind by a merge and has been removed —
	// a constant cannot be declared twice in Go.
	DATE_LAYOUT = "2006-01-02 15:04:05"
)
var (
MIN_DATE, _ = time.Parse(DATE_LAYOUT, "1900-01-01")
MAX_DATE, _ = time.Parse(DATE_LAYOUT, "2100-12-31")
MIN_DATE, _ = time.Parse(DATE_LAYOUT, "1900-01-01 00:00:00")
MAX_DATE, _ = time.Parse(DATE_LAYOUT, "2100-12-31 23:59:59")
DATE_LAYOUTS = []string{
// TODO:
// provide feedback when date parsing fails
......@@ -23,110 +22,164 @@ var (
// -> not too helpfull
"2006-01-02T15:04:05.999999999Z",
"2006-01-02T15:04:05.999999999",
"2006-01-02",
DATE_LAYOUT,
}
)
// PageURLs carries the pagination links returned alongside paged data;
// an empty string means the corresponding page does not exist.
type PageURLs struct {
	Last string `json:"last"`
	Next string `json:"next"`
	Self string `json:"self"`
}
// type QueryDataControl struct {
// Strict bool `schema:strict`
// }
// QueryDates holds the optional date-range query parameters.
type QueryDates struct {
	StartDate time.Time `schema:"startDate"`
	EndDate   time.Time `schema:"endDate"`
}
// QueryPaging holds the paging query parameters (offset and page size).
// NOTE: the closing brace was lost in a merge; restored so the file
// parses again.
type QueryPaging struct {
	Start uint64 `schema:"start"`
	Limit uint64 `schema:"limit"`
}
// flattenParams reduces url.Values-style multi-maps to a flat
// string-to-string map, rejecting any key that does not carry exactly
// one value.
func flattenParams(params map[string][]string) (map[string]string, error) {
	flatParams := make(map[string]string, len(params))
	for key, values := range params {
		// BUG FIX: guard len != 1 instead of len > 1 — an empty value
		// slice used to panic on the values[0] access below.
		if len(values) != 1 {
			return nil, fmt.Errorf("expected scalar query params, got '%v'", values)
		}
		flatParams[key] = values[0]
	}
	return flatParams, nil
}
type QueryParams struct {
// QueryDataControl
QueryPaging
QueryDates
// getDefault looks up key in m and falls back to dflt when the key is
// absent (a present-but-empty value is returned as-is).
func getDefault(m map[string]string, key string, dflt string) string {
	if value, ok := m[key]; ok {
		return value
	}
	return dflt
}
var timeConverter = func(value string) reflect.Value {
func parseDate(date string) (time.Time, error) {
for _, layout := range DATE_LAYOUTS {
v, err := time.Parse(layout, value)
if err == nil {
return reflect.ValueOf(v)
val, e := time.Parse(layout, date)
if e == nil {
return val, nil
}
}
return reflect.Value{} // this is the same as the private const invalidType
return time.Time{}, fmt.Errorf("invalid date '%v'", date)
}
func InitQueryParams(qParams map[string][]string) (QueryParams, error) {
out := QueryParams{}
pDecoder := schema.NewDecoder()
pDecoder.RegisterConverter(time.Time{}, timeConverter)
err := pDecoder.Decode(&out, qParams)
func prepDates(params map[string]string) (map[string]string, error) {
startDate, err := parseDate(getDefault(params, "startDate", MIN_DATE.Format(DATE_LAYOUT)))
if err != nil {
return nil, err
}
endDate, err := parseDate(getDefault(params, "endDate", MAX_DATE.Format(DATE_LAYOUT)))
if err != nil {
return out, err
return nil, err
}
out = out.initPaging().initDates()
return out, nil
if startDate.Before(MIN_DATE) || endDate.After(MAX_DATE) {
startDate = MIN_DATE
}
if endDate.After(MAX_DATE) || endDate.Before(MIN_DATE) {
endDate = MAX_DATE
}
params["startDate"] = startDate.Format(DATE_LAYOUT)
params["endDate"] = endDate.Format(DATE_LAYOUT)
return params, nil
}
func (p QueryParams) initDates() QueryParams {
if p.StartDate.Before(MIN_DATE) || p.EndDate.After(MAX_DATE) {
p.StartDate = MIN_DATE
func prepPaging(params map[string]string) (map[string]string, error) {
params["start"] = getDefault(params, "start", "0")
if _, err := strconv.Atoi(params["start"]); err != nil {
return nil, err
}
if p.EndDate.After(MAX_DATE) || p.EndDate.Before(MIN_DATE) {
p.EndDate = MAX_DATE
limit := getDefault(params, "limit", strconv.Itoa(PAGE_LIMIT))
value, err := strconv.Atoi(limit)
if err != nil {
return nil, err
}
return p
if (value > PAGE_LIMIT) || (value == 0) {
value = PAGE_LIMIT
}
params["limit"] = strconv.Itoa(value)
return params, nil
}
func (p QueryParams) initPaging() QueryParams {
if (p.Limit > PAGE_LIMIT) || (p.Limit == 0) {
p.Limit = PAGE_LIMIT
func PrepParams(params map[string][]string) (map[string]string, error) {
// TODO: check for unexpected params
flatParams, err := flattenParams(params)
if err != nil {
return nil, err
}
out, err := prepDates(flatParams)
if err != nil {
return nil, err
}
return p
}
func (p *QueryPaging) EncodePaging(u *url.URL, length uint64) *PageURLs {
return &PageURLs{
Next: p.NextURL(u, length).String(),
Last: p.LastURL(u).String(),
Self: p.ToURL(u).String(),
out, err = prepPaging(out)
if err != nil {
return nil, err
}
out["bad"] = getDefault(out, "badMax", "0.5")
out["doubt"] = getDefault(out, "doubtMax", "0.5")
return out, nil
}
func (p *QueryPaging) ToURL(u *url.URL) *url.URL {
query := url.Values{}
pEncoder := schema.NewEncoder()
err := pEncoder.Encode(p, query)
if err != nil {
log.Fatal(err)
func GetPages(params map[string]string, u *url.URL, length int) map[string]string {
start, _ := strconv.Atoi(params["start"])
limit, _ := strconv.Atoi(params["limit"])
out := map[string]string{
"last": lastURL(u, start, limit).String(),
"next": nextURL(u, length, start, limit).String(),
"self": thisURL(u, start, limit).String(),
}
u.RawQuery = query.Encode()
return u
return out
}
// thisURL stamps the current page's start/limit into u's query string
// and returns the (mutated) URL.
func thisURL(u *url.URL, start int, limit int) *url.URL {
	page := map[string]string{
		"start": strconv.Itoa(start),
		"limit": strconv.Itoa(limit),
	}
	return addQuery(u, page)
}
func (p *QueryPaging) NextURL(u *url.URL, length uint64) *url.URL {
if length < PAGE_LIMIT {
func lastURL(u *url.URL, start int, limit int) *url.URL {
start = start - limit
if start < 0 {
limit = limit + start
start = 0
}
if limit <= 0 {
return &url.URL{}
}
next := &QueryPaging{Start: p.Start + length, Limit: p.Limit}
return next.ToURL(u)
return addQuery(
u, map[string]string{
"start": strconv.Itoa(start),
// "limit": strconv.Itoa(stop),
"limit": strconv.Itoa(limit),
},
)
}
func (p *QueryPaging) LastURL(u *url.URL) *url.URL {
func nextURL(u *url.URL, length int, start int, limit int) *url.URL {
if p.Start < p.Limit {
if length < limit {
return &url.URL{}
}
last := &QueryPaging{Start: p.Start - p.Limit, Limit: p.Limit}
return last.ToURL(u)
return addQuery(
u, map[string]string{
"start": strconv.Itoa(start + limit),
"limit": strconv.Itoa(limit),
},
)
}
func addQuery(u *url.URL, params map[string]string) *url.URL {
query := u.Query()
for key, value := range params {
query.Set(key, value)
}
u.RawQuery = query.Encode()
return u
}
package params
import (
"net/url"
"strconv"
"testing"
)
// TestDefaultParams checks that PrepParams fills every expected default
// when no query parameters are supplied.
// NOTE(review): PrepParams stores the thresholds under "bad"/"doubt",
// while this test expects "badMax"/"doubtMax" — one of the two key sets
// looks wrong; TODO confirm which is intended.
func TestDefaultParams(t *testing.T) {
	expected := map[string]string{
		"badMax":    "0.5",
		"doubtMax":  "0.5",
		"startDate": MIN_DATE.Format(DATE_LAYOUT),
		"endDate":   MAX_DATE.Format(DATE_LAYOUT),
		"limit":     strconv.Itoa(PAGE_LIMIT),
		"start":     "0",
	}
	params, err := PrepParams(map[string][]string{})
	if err != nil {
		// surface the cause instead of swallowing it
		t.Fatalf("failed to initialize query parameters: %v", err)
	}
	for k, exp := range expected {
		got, found := params[k]
		if !found {
			t.Errorf("expected key '%v' not found", k)
			continue // comparing the zero value below would just add noise
		}
		if got != exp {
			t.Errorf("key '%v': expected '%v' got '%v'", k, exp, got)
		}
	}
}
// TestLastURL table-tests the previous-page link, covering the clamped
// partial first page and the no-previous-page (empty URL) case.
func TestLastURL(t *testing.T) {
	tables := []struct {
		start    int
		limit    int
		expected string
	}{
		{0, 10, ""},
		{2, 10, "http://www.testing.com?limit=2&start=0"},
		{5, 10, "http://www.testing.com?limit=5&start=0"},
		{40, 25, "http://www.testing.com?limit=25&start=15"},
	}
	for _, table := range tables {
		// FIX: parse a fresh URL per case — lastURL mutates its argument
		// via addQuery, so a shared URL leaks query state across cases.
		u, err := url.Parse("http://www.testing.com")
		if err != nil {
			t.Fatal(err)
		}
		got := lastURL(u, table.start, table.limit)
		if got.String() != table.expected {
			t.Errorf("start=%d limit=%d: expected '%v' got '%v'",
				table.start, table.limit, table.expected, got)
		}
	}
}
// TestNextURL table-tests the next-page link, including the "page not
// full" case that must yield an empty URL.
func TestNextURL(t *testing.T) {
	tables := []struct {
		start    int
		limit    int
		length   int
		expected string
	}{
		{2, 10, 100, "http://www.testing.com?limit=10&start=12"},
		{0, 100, 100, "http://www.testing.com?limit=100&start=100"},
		{5, 15, 5, ""},
	}
	for _, table := range tables {
		// FIX: parse a fresh URL per case — nextURL mutates its argument
		// via addQuery, so a shared URL leaks query state across cases.
		u, err := url.Parse("http://www.testing.com")
		if err != nil {
			t.Fatal(err)
		}
		got := nextURL(u, table.length, table.start, table.limit)
		if got.String() != table.expected {
			t.Errorf("start=%d limit=%d length=%d: expected '%v' got '%v'",
				table.start, table.limit, table.length, table.expected, got)
		}
	}
}
// TestThisURL checks the self link for a single start/limit pair.
func TestThisURL(t *testing.T) {
	u, _ := url.Parse("http://www.testing.com")
	const want = "http://www.testing.com?limit=100&start=20"
	if got := thisURL(u, 20, 100); got.String() != want {
		t.Errorf("expected '%v' got '%v'", want, got)
	}
}
package main
import (
"dmpapi/params"
"encoding/json"
"log"
"net/http"
)
// Envelope is the uniform JSON response wrapper: pagination links, the
// payload, and an error string (empty on success).
// NOTE: a merge left both the old (*params.PageURLs) and the new
// (map[string]string) Pagination/Data/Error field sets in place;
// duplicate struct fields do not compile, so only the map-based variant
// used by writeGetResponse is kept.
type Envelope struct {
	Pagination map[string]string `json:"pagination"`
	Data       interface{}       `json:"data"`
	Error      string            `json:"error"`
}
func writeGetResponse(w http.ResponseWriter, paging *params.PageURLs, data interface{}, error error) {
func (e Envelope) Encode(w http.ResponseWriter) {
enc := json.NewEncoder(w)
enc.SetEscapeHTML(false)
// TODO: handle error
err := enc.Encode(e)
if err != nil {
log.Println("Envelope, Encode:", err)
}
}
// writeErrorResponse emits an Envelope carrying only the error string;
// pagination and data stay at their zero values (JSON null).
func writeErrorResponse(w http.ResponseWriter, err error) {
	Envelope{Error: err.Error()}.Encode(w)
}
func writeGetResponse(w http.ResponseWriter, paging map[string]string, data interface{}) {
// NOTE:
// rearange so that we first decode everything, check for errors
// add potential encoding errors to the output
errorString := ""
if error != nil {
errorString = error.Error()
}
out := Envelope{
Pagination: paging,
Data: data,
Error: errorString,
}
enc := json.NewEncoder(w)
enc.SetEscapeHTML(false)
err := enc.Encode(out)
if err != nil {
log.Println("Encoding Error: ", err)
// NOTE: not sure how robust that is
// handleError(w, AppError{err, "failed to encode data", http.StatusInternalServerError})
Error: "",
}
out.Encode(w)
}
package main
import (
d "dmpapi/db"
"encoding/json"
// "fmt"
"net/http/httptest"
"testing"
)
// SensorResponse is the decoded test response for sensor endpoints:
// the shared BaseResponse fields plus the typed sensor payload.
type SensorResponse struct {
	BaseResponse
	Data []d.TheSensorModel `json:"data"`
}
func readSensorResponseBody(t *testing.T, rr *httptest.ResponseRecorder) SensorResponse {
res := SensorResponse{}
err := json.Unmarshal([]byte(rr