API first steps (work in progress)
Got base API schema & handler running with simple GET requests.

Still open:
* Builder UIs
* Most HTTP methods (POST, PATCH, PUT, DELETE)
r3-gabriel committed Feb 27, 2023
1 parent eba7cb4 commit a4d3fd5
Showing 23 changed files with 827 additions and 144 deletions.
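
For context, this is roughly what a client-side GET call against such an API endpoint could look like. This is only a sketch: the route scheme, host, and authorization header are assumptions for illustration and are not defined by this commit.

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// hypothetical route: module "myModule", API "myApi"; the actual
	// path scheme is not part of this commit
	req, err := http.NewRequest(http.MethodGet, "https://r3.example.com/api/myModule/myApi", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer <token>") // auth mechanism assumed

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}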
29 changes: 24 additions & 5 deletions cache/cache_schema.go
@@ -10,6 +10,7 @@ import (
"r3/db"
"r3/log"
"r3/module_option"
"r3/schema/api"
"r3/schema/article"
"r3/schema/attribute"
"r3/schema/collection"
@@ -44,11 +45,13 @@ var (
Schema_mx sync.RWMutex

// cached entities for regular use during normal operation
ModuleIdMap map[uuid.UUID]types.Module // all modules by ID
RelationIdMap map[uuid.UUID]types.Relation // all relations by ID
AttributeIdMap map[uuid.UUID]types.Attribute // all attributes by ID
RoleIdMap map[uuid.UUID]types.Role // all roles by ID
PgFunctionIdMap map[uuid.UUID]types.PgFunction // all PG functions by ID
ModuleIdMap map[uuid.UUID]types.Module // all modules by ID
ModuleApiNameMapId map[string]map[string]uuid.UUID // all API IDs by module+API name
RelationIdMap map[uuid.UUID]types.Relation // all relations by ID
AttributeIdMap map[uuid.UUID]types.Attribute // all attributes by ID
RoleIdMap map[uuid.UUID]types.Role // all roles by ID
PgFunctionIdMap map[uuid.UUID]types.PgFunction // all PG functions by ID
ApiIdMap map[uuid.UUID]types.Api // all APIs by ID

// schema cache
moduleIdsOrdered []uuid.UUID // all module IDs in desired order
@@ -132,10 +135,12 @@ func updateSchemaCache(moduleIdsUpdateOnly []uuid.UUID) error {
log.Info("cache", "starting schema processing for all modules")
moduleIdsOrdered = make([]uuid.UUID, 0)
ModuleIdMap = make(map[uuid.UUID]types.Module)
ModuleApiNameMapId = make(map[string]map[string]uuid.UUID)
RelationIdMap = make(map[uuid.UUID]types.Relation)
AttributeIdMap = make(map[uuid.UUID]types.Attribute)
RoleIdMap = make(map[uuid.UUID]types.Role)
PgFunctionIdMap = make(map[uuid.UUID]types.PgFunction)
ApiIdMap = make(map[uuid.UUID]types.Api)
} else {
log.Info("cache", "starting schema processing for one module")
}
@@ -162,6 +167,8 @@ func updateSchemaCache(moduleIdsUpdateOnly []uuid.UUID) error {
mod.PgFunctions = make([]types.PgFunction, 0)
mod.JsFunctions = make([]types.JsFunction, 0)
mod.Collections = make([]types.Collection, 0)
mod.Apis = make([]types.Api, 0)
ModuleApiNameMapId[mod.Name] = make(map[string]uuid.UUID)

// get articles
log.Info("cache", "load articles")
@@ -288,6 +295,18 @@ func updateSchemaCache(moduleIdsUpdateOnly []uuid.UUID) error {
return err
}

// get APIs
log.Info("cache", "load APIs")

mod.Apis, err = api.Get(mod.Id)
if err != nil {
return err
}
for _, a := range mod.Apis {
ApiIdMap[a.Id] = a
ModuleApiNameMapId[mod.Name][a.Name] = a.Id
}

// update cache map with parsed module
ModuleIdMap[mod.Id] = mod
}
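
The nested ModuleApiNameMapId map lets a request handler resolve a module name + API name pair to an API ID without scanning ApiIdMap. A minimal sketch of such a read-side lookup inside the cache package (helper name and error texts are illustrative, not part of this commit):

// resolve an API by module and API name; assumes the package-level
// maps and the Schema_mx lock shown above
func GetApiByName(moduleName string, apiName string) (types.Api, error) {
	Schema_mx.RLock()
	defer Schema_mx.RUnlock()

	nameMapId, exists := ModuleApiNameMapId[moduleName]
	if !exists {
		return types.Api{}, fmt.Errorf("unknown module '%s'", moduleName)
	}
	apiId, exists := nameMapId[apiName]
	if !exists {
		return types.Api{}, fmt.Errorf("unknown API '%s'", apiName)
	}
	return ApiIdMap[apiId], nil
}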
1 change: 1 addition & 0 deletions config/config.go
@@ -79,6 +79,7 @@ func SetConfigFilePath(path string) {
filePath = path
}
func SetLogLevels() {
log.SetLogLevel("api", int(GetUint64("logApi")))
log.SetLogLevel("backup", int(GetUint64("logBackup")))
log.SetLogLevel("cache", int(GetUint64("logCache")))
log.SetLogLevel("cluster", int(GetUint64("logCluster")))
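
With the 'api' context registered, API handler code can log through the existing facility like any other context, following the pattern used elsewhere in the codebase (the message below is illustrative):

log.Info("api", "processing GET request")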
2 changes: 1 addition & 1 deletion config/config_store.go
@@ -27,7 +27,7 @@ var (
"clusterNodeMissingAfter", "dbTimeoutCsv", "dbTimeoutDataRest",
"dbTimeoutDataWs", "dbTimeoutIcs", "filesKeepDaysDeleted",
"fileVersionsKeepCount", "fileVersionsKeepDays", "icsDaysPost",
"icsDaysPre", "icsDownload", "imagerThumbWidth", "logBackup",
"icsDaysPre", "icsDownload", "imagerThumbWidth", "logApi", "logBackup",
"logCache", "logCluster", "logCsv", "logImager", "logLdap", "logMail",
"logModule", "logServer", "logScheduler", "logTransfer", "logWebsocket",
"logsKeepDays", "productionMode", "pwForceDigit", "pwForceLower",
116 changes: 116 additions & 0 deletions data/data_query/data_query.go
@@ -0,0 +1,116 @@
package data_query

import (
	"r3/types"
	"time"

	"github.com/jackc/pgx/v5/pgtype"
)

func ConvertSubQueryToDataGet(query types.Query, queryAggregator pgtype.Text,
	attributeId pgtype.UUID, attributeIndex int, loginId int64, languageCode string) types.DataGet {

	return types.DataGet{
		RelationId: query.RelationId.Bytes,
		Joins:      ConvertQueryToDataJoins(query.Joins),
		Expressions: []types.DataGetExpression{
			types.DataGetExpression{
				Aggregator:    queryAggregator,
				AttributeId:   attributeId,
				AttributeIdNm: pgtype.UUID{},
				Index:         attributeIndex,
			},
		},
		Filters: ConvertQueryToDataFilter(query.Filters, loginId, languageCode),
		Orders:  ConvertQueryToDataOrders(query.Orders),
		Limit:   query.FixedLimit,
	}
}

func ConvertQueryToDataFilter(filters []types.QueryFilter,
	loginId int64, languageCode string) []types.DataGetFilter {

	filtersOut := make([]types.DataGetFilter, len(filters))

	// translate one filter side, resolving context-dependent contents
	// (sub queries, date/time keywords, login data) to concrete values
	var processSide = func(side types.QueryFilterSide) types.DataGetFilterSide {
		sideOut := types.DataGetFilterSide{
			AttributeId:     side.AttributeId,
			AttributeIndex:  side.AttributeIndex,
			AttributeNested: side.AttributeNested,
			Brackets:        side.Brackets,
			Query:           types.DataGet{},
			QueryAggregator: side.QueryAggregator,
			Value:           side.Value,
		}
		switch side.Content {
		// data
		case "subQuery":
			sideOut.Query = ConvertSubQueryToDataGet(side.Query, side.QueryAggregator,
				side.AttributeId, side.AttributeIndex, loginId, languageCode)
		case "true":
			sideOut.Value = true

		// date/time
		case "nowDate":
			t := time.Now().UTC()
			sideOut.Value = time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0,
				t.Location()).UTC().Unix() + int64(side.NowOffset.Int32)
		case "nowDatetime":
			sideOut.Value = time.Now().UTC().Unix() + int64(side.NowOffset.Int32)
		case "nowTime":
			t := time.Now().UTC()
			sideOut.Value = time.Date(1970, 1, 1, t.Hour(), t.Minute(), t.Second(), 0,
				t.Location()).UTC().Unix() + int64(side.NowOffset.Int32)

		// user
		case "languageCode":
			sideOut.Value = languageCode
		case "login":
			sideOut.Value = loginId
		}
		return sideOut
	}

	for i, filter := range filters {

		filterOut := types.DataGetFilter{
			Connector: filter.Connector,
			Operator:  filter.Operator,
			Side0:     processSide(filter.Side0),
			Side1:     processSide(filter.Side1),
		}

		// wrap the entire filter set in one extra bracket pair:
		// open on the first filter, close on the last
		if i == 0 {
			filterOut.Side0.Brackets++
		}
		if i == len(filters)-1 {
			filterOut.Side1.Brackets++
		}
		filtersOut[i] = filterOut
	}
	return filtersOut
}

func ConvertQueryToDataJoins(joins []types.QueryJoin) []types.DataGetJoin {
	joinsOut := make([]types.DataGetJoin, 0)
	for _, join := range joins {
		joinsOut = append(joinsOut, types.DataGetJoin{
			AttributeId: join.AttributeId.Bytes,
			Connector:   join.Connector,
			Index:       join.Index,
			IndexFrom:   join.IndexFrom,
		})
	}
	return joinsOut
}

func ConvertQueryToDataOrders(orders []types.QueryOrder) []types.DataGetOrder {
	ordersOut := make([]types.DataGetOrder, 0)
	for _, order := range orders {
		ordersOut = append(ordersOut, types.DataGetOrder{
			AttributeId: pgtype.UUID{Bytes: order.AttributeId, Valid: true},
			Index:       pgtype.Int4{Int32: int32(order.Index), Valid: true},
			Ascending:   order.Ascending,
		})
	}
	return ordersOut
}
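
The three now* cases above boil down to epoch arithmetic: nowDate keeps only the date part (midnight UTC), nowTime keeps only the time-of-day part (anchored to 1970-01-01), nowDatetime uses the full timestamp, and now_offset shifts the result by a number of seconds. A standalone sketch of the same arithmetic:

package main

import (
	"fmt"
	"time"
)

func main() {
	t := time.Now().UTC()
	offset := int64(3600) // e.g. a now_offset of +1 hour

	// "nowDate": truncate to midnight UTC, then apply the offset
	nowDate := time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, time.UTC).Unix() + offset

	// "nowTime": keep only the time of day, anchored to 1970-01-01
	nowTime := time.Date(1970, 1, 1, t.Hour(), t.Minute(), t.Second(), 0, time.UTC).Unix() + offset

	// "nowDatetime": the full timestamp plus the offset
	nowDatetime := t.Unix() + offset

	fmt.Println(nowDate, nowTime, nowDatetime)
}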
64 changes: 64 additions & 0 deletions db/upgrade/upgrade.go
@@ -239,6 +239,70 @@ var upgradeFunctions = map[string]func(tx pgx.Tx) (string, error){
	ALTER TYPE app.filter_side_content ADD VALUE 'nowTime';
	ALTER TABLE app.query_filter_side ADD COLUMN now_offset INTEGER;

	-- new API entity
	ALTER TYPE instance.log_context ADD VALUE 'api';
	INSERT INTO instance.config (name,value) VALUES ('logApi','2');

	CREATE TABLE app.api (
		id uuid NOT NULL,
		module_id uuid NOT NULL,
		name varchar(64) NOT NULL,
		has_delete bool NOT NULL,
		has_get bool NOT NULL,
		has_patch bool NOT NULL,
		has_post bool NOT NULL,
		has_put bool NOT NULL,
		limit_def int NOT NULL,
		limit_max int NOT NULL,
		verbose_get bool NOT NULL,
		CONSTRAINT api_pkey PRIMARY KEY (id),
		CONSTRAINT api_module_id_fkey FOREIGN KEY (module_id)
			REFERENCES app.module (id) MATCH SIMPLE
			ON UPDATE CASCADE
			ON DELETE CASCADE
			DEFERRABLE INITIALLY DEFERRED
			NOT VALID
	);

	ALTER TABLE app.query ADD COLUMN api_id uuid;
	ALTER TABLE app.query ADD CONSTRAINT query_api_id_fkey FOREIGN KEY (api_id)
		REFERENCES app.api (id) MATCH SIMPLE
		ON UPDATE CASCADE
		ON DELETE CASCADE
		DEFERRABLE INITIALLY DEFERRED;

	CREATE INDEX IF NOT EXISTS fki_query_api_id_fkey
		ON app.query USING btree (api_id ASC NULLS LAST);

	ALTER TABLE app.query DROP CONSTRAINT query_single_parent;
	ALTER TABLE app.query ADD CONSTRAINT query_single_parent CHECK (1 = (
		CASE WHEN api_id IS NULL THEN 0 ELSE 1 END +
		CASE WHEN collection_id IS NULL THEN 0 ELSE 1 END +
		CASE WHEN column_id IS NULL THEN 0 ELSE 1 END +
		CASE WHEN field_id IS NULL THEN 0 ELSE 1 END +
		CASE WHEN form_id IS NULL THEN 0 ELSE 1 END +
		CASE WHEN query_filter_query_id IS NULL THEN 0 ELSE 1 END
	));

	ALTER TABLE app."column" ADD COLUMN api_id uuid;
	ALTER TABLE app."column" ADD CONSTRAINT column_api_id_fkey FOREIGN KEY (api_id)
		REFERENCES app.api (id) MATCH SIMPLE
		ON UPDATE CASCADE
		ON DELETE CASCADE
		DEFERRABLE INITIALLY DEFERRED;

	CREATE INDEX IF NOT EXISTS fki_column_api_id_fkey
		ON app."column" USING btree (api_id ASC NULLS LAST);

	ALTER TABLE app."column" DROP CONSTRAINT column_single_parent;
	ALTER TABLE app."column" ADD CONSTRAINT column_single_parent CHECK (1 = (
		CASE WHEN api_id IS NULL THEN 0 ELSE 1 END +
		CASE WHEN collection_id IS NULL THEN 0 ELSE 1 END +
		CASE WHEN field_id IS NULL THEN 0 ELSE 1 END
	));
	`)
return "3.3", err
},
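
The rebuilt query_single_parent and column_single_parent constraints encode the rule that every query and every column must hang off exactly one parent entity: the CHECK adds up one CASE per candidate foreign key and requires the sum to be exactly 1. The same rule expressed as a Go helper (illustrative only, not part of the commit):

// exactlyOne mirrors the single-parent CHECK constraints: it reports
// whether exactly one of the candidate parent references is set
func exactlyOne(parentSet ...bool) bool {
	n := 0
	for _, set := range parentSet {
		if set {
			n++
		}
	}
	return n == 1
}

// e.g. for app.query: exactlyOne(apiId.Valid, collectionId.Valid,
// columnId.Valid, fieldId.Valid, formId.Valid, queryFilterQueryId.Valid)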