backend/pkg/database: to squash, git add renamed files

Nicholas Murray 2023-10-14 10:33:37 -07:00
parent abbfa9a93e
commit c90f3c0d88
7 changed files with 0 additions and 3617 deletions

View File

@@ -1,503 +0,0 @@
package database
import (
"context"
"fmt"
"github.com/dominikbraun/graph"
"github.com/fastenhealth/fasten-onprem/backend/pkg"
"github.com/fastenhealth/fasten-onprem/backend/pkg/models"
"github.com/fastenhealth/fasten-onprem/backend/pkg/utils"
databaseModel "github.com/fastenhealth/fasten-onprem/backend/pkg/models/database"
"golang.org/x/exp/slices"
"log"
"strings"
)
type VertexResourcePlaceholder struct {
UserID string
SourceID string
ResourceID string
ResourceType string
RelatedResourcePlaceholder []*VertexResourcePlaceholder
}
func (rp *VertexResourcePlaceholder) ID() string {
return resourceKeysVertexId(rp.SourceID, rp.ResourceType, rp.ResourceID)
}
// Retrieve a list of all FHIR resources (vertices) and a list of all associations (edges),
// generate a graph, and
// return a list of root nodes and their flattened related resources.
func (sr *SqliteRepository) GetFlattenedResourceGraph(ctx context.Context, graphType pkg.ResourceGraphType, options models.ResourceGraphOptions) (map[string][]*models.ResourceBase, *models.ResourceGraphMetadata, error) {
currentUser, currentUserErr := sr.GetCurrentUser(ctx)
if currentUserErr != nil {
return nil, nil, currentUserErr
}
//initialize the graph results metadata
resourceGraphMetadata := models.ResourceGraphMetadata{
TotalElements: 0,
PageSize: 20, //TODO: replace this with pkg.DefaultPageSize
Page: options.Page,
}
// Get list of all (non-reciprocal) relationships
var relatedResourceRelationships []models.RelatedResource
// SELECT * FROM related_resources WHERE user_id = "53c1e930-63af-46c9-b760-8e83cbc1abd9";
result := sr.GormClient.WithContext(ctx).
Where(models.RelatedResource{
ResourceBaseUserID: currentUser.ID,
}).
Find(&relatedResourceRelationships)
if result.Error != nil {
return nil, nil, result.Error
}
//Generate Graph
// TODO optimization: eventually cache the graph in a database/storage, and update when new resources are added.
g := graph.New(resourceVertexId, graph.Directed(), graph.Acyclic(), graph.Rooted())
//// Get list of all resources TODO - REPLACED THIS
//wrappedResourceModels, err := sr.ListResources(ctx, models.ListResourceQueryOptions{})
//if err != nil {
// return nil, err
//}
//add vertices to the graph (must be done first)
//we don't want to request all resources from the database, so we will create a placeholder vertex for each resource.
//we will then use the vertex id to lookup the resource from the database.
//this is a bit of a hack, but it allows us to use the graph library without having to load all resources into memory.
//create a placeholder vertex for each resource (ensuring uniqueness)
resourcePlaceholders := map[string]VertexResourcePlaceholder{}
for _, relationship := range relatedResourceRelationships {
//create placeholders
fromResourcePlaceholder := VertexResourcePlaceholder{
UserID: relationship.ResourceBaseUserID.String(),
SourceID: relationship.ResourceBaseSourceID.String(),
ResourceID: relationship.ResourceBaseSourceResourceID,
ResourceType: relationship.ResourceBaseSourceResourceType,
}
toResourcePlaceholder := VertexResourcePlaceholder{
UserID: relationship.RelatedResourceUserID.String(),
SourceID: relationship.RelatedResourceSourceID.String(),
ResourceID: relationship.RelatedResourceSourceResourceID,
ResourceType: relationship.RelatedResourceSourceResourceType,
}
//add placeholders to map, if they don't already exist
if _, ok := resourcePlaceholders[fromResourcePlaceholder.ID()]; !ok {
resourcePlaceholders[fromResourcePlaceholder.ID()] = fromResourcePlaceholder
}
if _, ok := resourcePlaceholders[toResourcePlaceholder.ID()]; !ok {
resourcePlaceholders[toResourcePlaceholder.ID()] = toResourcePlaceholder
}
}
for ndx, _ := range resourcePlaceholders {
resourcePlaceholder := resourcePlaceholders[ndx]
log.Printf("Adding vertex: %v", resourcePlaceholder.ID())
err := g.AddVertex(
&resourcePlaceholder,
)
if err != nil {
return nil, nil, fmt.Errorf("an error occurred while adding vertex: %v", err)
}
}
//add reciprocal relationships (depending on the graph type)
relatedResourceRelationships = sr.PopulateGraphTypeReciprocalRelationships(graphType, relatedResourceRelationships)
//add edges to graph
for _, relationship := range relatedResourceRelationships {
err := g.AddEdge(
resourceKeysVertexId(relationship.ResourceBaseSourceID.String(), relationship.ResourceBaseSourceResourceType, relationship.ResourceBaseSourceResourceID),
resourceKeysVertexId(relationship.RelatedResourceSourceID.String(), relationship.RelatedResourceSourceResourceType, relationship.RelatedResourceSourceResourceID),
)
if err != nil {
//this may occur because vertices may not exist
sr.Logger.Warnf("ignoring, an error occurred while adding edge: %v", err)
}
}
//// simplify graph if possible.
//graph.TransitiveReduction(g)
// AdjacencyMap computes and returns an adjacency map containing all vertices in the graph.
//
// There is an entry for each vertex, and each of those entries is another map whose keys are
// the hash values of the adjacent vertices. The value is an Edge instance that stores the
// source and target hash values (these are the same as the map keys) as well as edge metadata.
// map[string]map[string]Edge[string]{
// "A": map[string]Edge[string]{
// "B": {Source: "A", Target: "B"}
// "C": {Source: "A", Target: "C"}
// }
// }
adjacencyMap, err := g.AdjacencyMap()
if err != nil {
return nil, nil, fmt.Errorf("error while generating AdjacencyMap: %v", err)
}
// For a directed graph, PredecessorMap is the complement of AdjacencyMap. This is because in a directed graph, only
// vertices joined by an outgoing edge are considered adjacent to the current vertex, whereas
// predecessors are the vertices joined by an ingoing edge.
// ie. "empty" verticies in this map are "root" nodes.
predecessorMap, err := g.PredecessorMap()
if err != nil {
return nil, nil, fmt.Errorf("error while generating PredecessorMap: %v", err)
}
// Doing this in one massive function, because passing graph by reference is difficult due to generics.
// Step 1: use predecessorMap to find all "root" resources (eg. MedicalHistory - encounters and conditions). store those nodes in their respective lists.
resourcePlaceholderListDictionary := map[string][]*VertexResourcePlaceholder{}
sources, _, sourceFlattenLevel := getSourcesAndSinksForGraphType(graphType)
for vertexId, val := range predecessorMap {
if len(val) != 0 {
//skip any nodes/vertices/resources that are not "root"
continue
}
resourcePlaceholder, err := g.Vertex(vertexId)
if err != nil {
//could not find this vertex in graph, ignoring
log.Printf("could not find vertex in graph: %v", err)
continue
}
//check if this "root" node (which has no predecessors) is a valid source type
foundSourceType := ""
foundSourceLevel := -1
for ndx, sourceResourceTypes := range sources {
log.Printf("testing resourceType: %s", resourcePlaceholder.ResourceType)
if slices.Contains(sourceResourceTypes, strings.ToLower(resourcePlaceholder.ResourceType)) {
foundSourceType = resourcePlaceholder.ResourceType
foundSourceLevel = ndx
break
}
}
if foundSourceLevel == -1 {
continue //skip this resourcePlaceholder, it is not a valid source type
}
if _, ok := resourcePlaceholderListDictionary[foundSourceType]; !ok {
resourcePlaceholderListDictionary[foundSourceType] = []*VertexResourcePlaceholder{}
}
resourcePlaceholderListDictionary[foundSourceType] = append(resourcePlaceholderListDictionary[foundSourceType], resourcePlaceholder)
}
// Step 2: now that we've created a relationship graph using placeholders, we need to determine which page of resources to return
// and look up the actual resources from the database.
resourceListDictionary, totalElements, err := sr.InflateResourceGraphAtPage(resourcePlaceholderListDictionary, options.Page)
if err != nil {
return nil, nil, fmt.Errorf("error while paginating & inflating resource graph: %v", err)
}
resourceGraphMetadata.TotalElements = totalElements
// Step 3: define a function that, when given a resource, finds all related resources, flattens the hierarchy, and sets the RelatedResourceFhir list
flattenRelatedResourcesFn := func(resource *models.ResourceBase) {
// this is a "root" encounter, which is not related to any condition, we should add it to the Unknown encounters list
vertexId := resourceVertexId(&VertexResourcePlaceholder{
ResourceType: resource.SourceResourceType,
ResourceID: resource.SourceResourceID,
SourceID: resource.SourceID.String(),
UserID: resource.UserID.String(),
})
sr.Logger.Debugf("populating resourcePlaceholder: %s", vertexId)
resource.RelatedResource = []*models.ResourceBase{}
//get all the resource placeholders associated with this node
//TODO: handle error?
graph.DFS(g, vertexId, func(relatedVertexId string) bool {
relatedResourcePlaceholder, _ := g.Vertex(relatedVertexId)
//skip the current resourcePlaceholder if it's referenced in this list.
//also skip the current resourcePlaceholder if it's a Binary resourcePlaceholder (which is a special case)
if vertexId != resourceVertexId(relatedResourcePlaceholder) && relatedResourcePlaceholder.ResourceType != "Binary" {
relatedResource, err := sr.GetResourceByResourceTypeAndId(ctx, relatedResourcePlaceholder.ResourceType, relatedResourcePlaceholder.ResourceID)
if err != nil {
sr.Logger.Warnf("ignoring, cannot safely handle error which occurred while getting related resource: %v", err)
return true
}
resource.RelatedResource = append(
resource.RelatedResource,
relatedResource,
)
}
return false
})
}
// Step 4: flatten resources (if needed) and sort them
for resourceType, _ := range resourceListDictionary {
sourceFlatten, sourceFlattenOk := sourceFlattenLevel[strings.ToLower(resourceType)]
if sourceFlattenOk && sourceFlatten {
//if flatten is set to true, we want to flatten the graph. This is usually for non primary source types (eg. Encounter is a source type, but Condition is the primary source type)
// Step 3: populate related resources for each encounter, flattened
for ndx, _ := range resourceListDictionary[resourceType] {
// this is a "root" encounter, which is not related to any condition, we should add it to the Unknown encounters list
flattenRelatedResourcesFn(resourceListDictionary[resourceType][ndx])
//sort all related resources (by date, desc)
resourceListDictionary[resourceType][ndx].RelatedResource = utils.SortResourcePtrListByDate(resourceListDictionary[resourceType][ndx].RelatedResource)
}
} else {
// if flatten is set to false, we want to preserve the top relationships in the graph hierarchy. This is usually for primary source types (eg. Condition is the primary source type)
// we want to ensure context is preserved, so we will flatten the graph further down in the hierarchy
// Step 4: find all encounters referenced by the root conditions, populate them, then add them to the condition as RelatedResourceFhir
for ndx, _ := range resourceListDictionary[resourceType] {
// this is a "root" condition,
resourceListDictionary[resourceType][ndx].RelatedResource = []*models.ResourceBase{}
currentResource := resourceListDictionary[resourceType][ndx]
vertexId := resourceKeysVertexId(currentResource.SourceID.String(), currentResource.SourceResourceType, currentResource.SourceResourceID)
for relatedVertexId, _ := range adjacencyMap[vertexId] {
relatedResourcePlaceholder, _ := g.Vertex(relatedVertexId)
relatedResourceFhir, err := sr.GetResourceByResourceTypeAndId(ctx, relatedResourcePlaceholder.ResourceType, relatedResourcePlaceholder.ResourceID)
if err != nil {
sr.Logger.Warnf("ignoring, cannot safely handle error which occurred while getting related resource (flatten=false): %v", err)
continue
}
flattenRelatedResourcesFn(relatedResourceFhir)
resourceListDictionary[resourceType][ndx].RelatedResource = append(resourceListDictionary[resourceType][ndx].RelatedResource, relatedResourceFhir)
}
//sort all related resources (by date, desc)
resourceListDictionary[resourceType][ndx].RelatedResource = utils.SortResourcePtrListByDate(resourceListDictionary[resourceType][ndx].RelatedResource)
}
}
resourceListDictionary[resourceType] = utils.SortResourcePtrListByDate(resourceListDictionary[resourceType])
}
// Step 5: return the populated resource list dictionary
return resourceListDictionary, &resourceGraphMetadata, nil
}
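// Hedged usage sketch (not part of the original file): a typical call site for the function
// above, assuming a request-scoped context that carries the authenticated user:
//
//	flattened, metadata, err := sr.GetFlattenedResourceGraph(ctx, pkg.ResourceGraphTypeMedicalHistory, models.ResourceGraphOptions{Page: 0})
//	if err != nil {
//		return err
//	}
//	// flattened["Condition"] would hold the root Condition resources for this page, each with its
//	// RelatedResource list populated; metadata.TotalElements holds the total number of root resources.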
// InflateResourceGraphAtPage - this function will take a dictionary of placeholder "sources" graph and load the actual resources from the database, for a specific page
// - first, it will load all the "source" resources (eg. Encounter, Condition, etc)
// - sort the root resources by date, desc
// - use the page number + page size to determine which root resources to return
// - return a dictionary of "source" resource lists
func (sr *SqliteRepository) InflateResourceGraphAtPage(resourcePlaceholderListDictionary map[string][]*VertexResourcePlaceholder, page int) (map[string][]*models.ResourceBase, int, error) {
totalElements := 0
// Step 3a: since we can't calculate the sort order until the resources are loaded, we need to load all the root resources first.
//TODO: maybe it's more performant to query each resource by type/id/source, since they are indexed already?
rootWrappedResourceModels := []models.ResourceBase{}
for resourceType, _ := range resourcePlaceholderListDictionary {
// resourcePlaceholderListDictionary contains top level resource types (eg. Encounter, Condition, etc)
selectList := [][]interface{}{}
for ndx, _ := range resourcePlaceholderListDictionary[resourceType] {
selectList = append(selectList, []interface{}{
resourcePlaceholderListDictionary[resourceType][ndx].UserID,
resourcePlaceholderListDictionary[resourceType][ndx].SourceID,
resourcePlaceholderListDictionary[resourceType][ndx].ResourceType,
resourcePlaceholderListDictionary[resourceType][ndx].ResourceID,
})
}
tableName, err := databaseModel.GetTableNameByResourceType(resourceType)
if err != nil {
return nil, totalElements, err
}
var tableWrappedResourceModels []models.ResourceBase
sr.GormClient.
Where("(user_id, source_id, source_resource_type, source_resource_id) IN ?", selectList).
Table(tableName).
Find(&tableWrappedResourceModels)
//append these resources to the rootWrappedResourceModels list
rootWrappedResourceModels = append(rootWrappedResourceModels, tableWrappedResourceModels...)
}
//sort
rootWrappedResourceModels = utils.SortResourceListByDate(rootWrappedResourceModels)
//calculate total elements
totalElements = len(rootWrappedResourceModels)
//paginate (by calculating window for the slice)
rootWrappedResourceModels = utils.PaginateResourceList(rootWrappedResourceModels, page, 20) //todo: replace size with pkg.ResourceListPageSize
// Step 3b: now that we have the root resources, lets generate a dictionary of resource lists, keyed by resource type
resourceListDictionary := map[string][]*models.ResourceBase{}
for ndx, _ := range rootWrappedResourceModels {
resourceType := rootWrappedResourceModels[ndx].SourceResourceType
if _, ok := resourceListDictionary[resourceType]; !ok {
resourceListDictionary[resourceType] = []*models.ResourceBase{}
}
resourceListDictionary[resourceType] = append(resourceListDictionary[resourceType], &rootWrappedResourceModels[ndx])
}
// Step 4: return the populated resource list dictionary
return resourceListDictionary, totalElements, nil
}
//We need to support the following types of graphs:
// - Medical History
// - AddressBook (contacts)
// - Medications
// - Billing Report
//edges are always "strongly connected", however "source" nodes (roots, like Condition or Encounter -- depending on the graph type) are only one way.
//add an edge from every resource to its related resource. Keep in mind that FHIR resources may not contain reciprocal edges, so we ensure the graph is rooted by flipping any
//related resources that are "Condition" or "Encounter"
func (sr *SqliteRepository) PopulateGraphTypeReciprocalRelationships(graphType pkg.ResourceGraphType, relationships []models.RelatedResource) []models.RelatedResource {
reciprocalRelationships := []models.RelatedResource{}
//prioritized lists of sources and sinks for the graph. We will use these to determine which resources are "root" nodes.
sources, sinks, _ := getSourcesAndSinksForGraphType(graphType)
for _, relationship := range relationships {
//determine the source and sink levels for each side of this relationship
resourceAGraphSourceLevel := foundResourceGraphSource(relationship.ResourceBaseSourceResourceType, sources)
resourceBGraphSourceLevel := foundResourceGraphSource(relationship.RelatedResourceSourceResourceType, sources)
resourceAGraphSinkLevel := foundResourceGraphSink(relationship.ResourceBaseSourceResourceType, sinks)
resourceBGraphSinkLevel := foundResourceGraphSink(relationship.RelatedResourceSourceResourceType, sinks)
if resourceAGraphSourceLevel > -1 && resourceBGraphSourceLevel > -1 {
//handle the case where both resources are "sources" (eg. MedicalHistory - Condition or Encounter)
if resourceAGraphSourceLevel <= resourceBGraphSourceLevel {
//A is a higher priority than B, so we will add an edge from A to B
reciprocalRelationships = append(reciprocalRelationships, relationship)
} else {
//B is a higher priority than A, so we will add an edge from B to A (flipped relationship)
reciprocalRelationships = append(reciprocalRelationships, models.RelatedResource{
ResourceBaseUserID: relationship.RelatedResourceUserID,
ResourceBaseSourceID: relationship.RelatedResourceSourceID,
ResourceBaseSourceResourceType: relationship.RelatedResourceSourceResourceType,
ResourceBaseSourceResourceID: relationship.RelatedResourceSourceResourceID,
RelatedResourceUserID: relationship.ResourceBaseUserID,
RelatedResourceSourceID: relationship.ResourceBaseSourceID,
RelatedResourceSourceResourceType: relationship.ResourceBaseSourceResourceType,
RelatedResourceSourceResourceID: relationship.ResourceBaseSourceResourceID,
})
}
} else if resourceAGraphSourceLevel > -1 || resourceBGraphSinkLevel > -1 {
//resource A is a Source, or resource B is a sink, normal A -> B relationship (edge)
reciprocalRelationships = append(reciprocalRelationships, relationship)
} else if resourceBGraphSourceLevel > -1 || resourceAGraphSinkLevel > -1 {
//resource B is a Source, or resource A is a sink, create B -> A relationship (edge)
reciprocalRelationships = append(reciprocalRelationships, models.RelatedResource{
ResourceBaseUserID: relationship.RelatedResourceUserID,
ResourceBaseSourceID: relationship.RelatedResourceSourceID,
ResourceBaseSourceResourceType: relationship.RelatedResourceSourceResourceType,
ResourceBaseSourceResourceID: relationship.RelatedResourceSourceResourceID,
RelatedResourceUserID: relationship.ResourceBaseUserID,
RelatedResourceSourceID: relationship.ResourceBaseSourceID,
RelatedResourceSourceResourceType: relationship.ResourceBaseSourceResourceType,
RelatedResourceSourceResourceID: relationship.ResourceBaseSourceResourceID,
})
} else {
//this is a regular pair of resources, create reciprocal edges
reciprocalRelationships = append(reciprocalRelationships, relationship)
reciprocalRelationships = append(reciprocalRelationships, models.RelatedResource{
ResourceBaseUserID: relationship.RelatedResourceUserID,
ResourceBaseSourceID: relationship.RelatedResourceSourceID,
ResourceBaseSourceResourceType: relationship.RelatedResourceSourceResourceType,
ResourceBaseSourceResourceID: relationship.RelatedResourceSourceResourceID,
RelatedResourceUserID: relationship.ResourceBaseUserID,
RelatedResourceSourceID: relationship.ResourceBaseSourceID,
RelatedResourceSourceResourceType: relationship.ResourceBaseSourceResourceType,
RelatedResourceSourceResourceID: relationship.ResourceBaseSourceResourceID,
})
}
}
return reciprocalRelationships
}
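// Hedged example (illustrative, not part of the original file): for the MedicalHistory graph type,
// "condition" is a level-0 source and "encounter" a level-1 source, so an Encounter -> Condition
// relationship is flipped to keep Condition at the root (identifier fields omitted for brevity):
//
//	rels := []models.RelatedResource{{
//		ResourceBaseSourceResourceType:    "Encounter",
//		RelatedResourceSourceResourceType: "Condition",
//	}}
//	out := sr.PopulateGraphTypeReciprocalRelationships(pkg.ResourceGraphTypeMedicalHistory, rels)
//	// out[0].ResourceBaseSourceResourceType == "Condition" (the edge now points Condition -> Encounter)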
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Utilities
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func getSourcesAndSinksForGraphType(graphType pkg.ResourceGraphType) ([][]string, [][]string, map[string]bool) {
var sources [][]string
var sinks [][]string
var sourceFlattenRelated map[string]bool
switch graphType {
case pkg.ResourceGraphTypeMedicalHistory:
sources = [][]string{
{"condition", "composition"},
{"encounter", "explanationofbenefit"},
}
sinks = [][]string{
{"location", "device", "organization", "practitioner", "medication", "patient", "coverage"}, //resources that are shared across multiple conditions
{"binary"},
}
sourceFlattenRelated = map[string]bool{
"encounter": true,
}
break
case pkg.ResourceGraphTypeAddressBook:
sources = [][]string{
{"practitioner", "organization"},
{"practitionerrole", "careteam", "location"},
}
sinks = [][]string{
{"condition", "composition", "explanationofbenefits"}, //resources that are shared across multiple practitioners
{"encounter", "medication", "patient"},
}
sourceFlattenRelated = map[string]bool{}
}
return sources, sinks, sourceFlattenRelated
}
//source resource types are resources that are at the root of the graph, nothing may reference them directly
// loop through the list of source resource types, and see if the checkResourceType is one of them
func foundResourceGraphSource(checkResourceType string, sourceResourceTypes [][]string) int {
found := -1
for i, sourceResourceType := range sourceResourceTypes {
if slices.Contains(sourceResourceType, strings.ToLower(checkResourceType)) {
found = i
break
}
}
return found
}
//sink resource types are the leaves of the graph, they must not reference anything else. (only be referenced)
func foundResourceGraphSink(checkResourceType string, sinkResourceTypes [][]string) int {
found := -1
for i, sinkResourceType := range sinkResourceTypes {
if slices.Contains(sinkResourceType, strings.ToLower(checkResourceType)) {
found = i
break
}
}
return found
}
// helper function for GetResourceGraph, creating a "hash" for the resource
func resourceVertexId(resourcePlaceholder *VertexResourcePlaceholder) string {
return resourceKeysVertexId(resourcePlaceholder.SourceID, resourcePlaceholder.ResourceType, resourcePlaceholder.ResourceID)
}
func resourceKeysVertexId(sourceId string, resourceType string, resourceId string) string {
return strings.ToLower(fmt.Sprintf("%s/%s/%s", sourceId, resourceType, resourceId))
}
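// Hedged example (not part of the original file): vertex IDs are simply the lowercased
// "sourceId/resourceType/resourceId" triple, for instance:
//
//	id := resourceKeysVertexId("source-1", "Condition", "Abc123")
//	// id == "source-1/condition/abc123"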

View File

@@ -1,683 +0,0 @@
package database
import (
"context"
"fmt"
"github.com/fastenhealth/fasten-onprem/backend/pkg/models"
databaseModel "github.com/fastenhealth/fasten-onprem/backend/pkg/models/database"
"github.com/iancoleman/strcase"
"github.com/samber/lo"
"golang.org/x/exp/maps"
"golang.org/x/exp/slices"
"gorm.io/gorm"
"strconv"
"strings"
"time"
)
type SearchParameterType string
const (
//simple types
SearchParameterTypeNumber SearchParameterType = "number"
SearchParameterTypeDate SearchParameterType = "date"
SearchParameterTypeUri SearchParameterType = "uri"
SearchParameterTypeKeyword SearchParameterType = "keyword" //this is a literal/string primitive.
//complex types
SearchParameterTypeString SearchParameterType = "string"
SearchParameterTypeToken SearchParameterType = "token"
SearchParameterTypeReference SearchParameterType = "reference"
SearchParameterTypeQuantity SearchParameterType = "quantity"
SearchParameterTypeComposite SearchParameterType = "composite"
SearchParameterTypeSpecial SearchParameterType = "special"
)
const TABLE_ALIAS = "fhir"
// Allows users to use SearchParameters to query resources
// Can generate simple or complex queries, depending on the SearchParameter type:
//
// eg. Simple
//
// eg. Complex
// SELECT fhir.*
// FROM fhir_observation as fhir, json_each(fhir.code) as codeJson
// WHERE (
//
// (codeJson.value ->> '$.code' = "29463-7" AND codeJson.value ->> '$.system' = "http://loinc.org")
// OR (codeJson.value ->> '$.code' = "3141-9" AND codeJson.value ->> '$.system' = "http://loinc.org")
// OR (codeJson.value ->> '$.code' = "27113001" AND codeJson.value ->> '$.system' = "http://snomed.info/sct")
//
// )
// AND (user_id = "6efcd7c5-3f29-4f0d-926d-a66ff68bbfc2")
// GROUP BY `fhir`.`id`
func (sr *SqliteRepository) QueryResources(ctx context.Context, query models.QueryResource) (interface{}, error) {
sqlQuery, err := sr.sqlQueryResources(ctx, query)
if err != nil {
return nil, err
}
if query.Aggregations != nil && (query.Aggregations.GroupBy != nil || query.Aggregations.CountBy != nil) {
results := []map[string]interface{}{}
clientResp := sqlQuery.Find(&results)
return results, clientResp.Error
} else {
results := []models.ResourceBase{}
clientResp := sqlQuery.Find(&results)
return results, clientResp.Error
}
}
// see QueryResources
// this function has all the logic, but should only be called directly for testing
func (sr *SqliteRepository) sqlQueryResources(ctx context.Context, query models.QueryResource) (*gorm.DB, error) {
//todo, until we actually parse the select statement, we will just return all resources based on "from"
//SECURITY: this is required to ensure that only valid resource types are queried (since it's controlled by the user)
if !slices.Contains(databaseModel.GetAllowedResourceTypes(), query.From) {
return nil, fmt.Errorf("invalid resource type %s", query.From)
}
if queryValidate := query.Validate(); queryValidate != nil {
return nil, queryValidate
}
//find the associated Gorm Model for this query
queryModel, err := databaseModel.NewFhirResourceModelByType(query.From)
if err != nil {
return nil, err
}
//SECURITY: this would be unsafe as the user controls the query.From value, however we've validated it is a valid resource type above
fromClauses := []string{fmt.Sprintf("%s as %s", strcase.ToSnake("Fhir"+query.From), TABLE_ALIAS)}
whereClauses := []string{}
whereNamedParameters := map[string]interface{}{}
//find the FHIR search types associated with each where clause. Any unknown parameters will be ignored.
searchCodeToTypeLookup := queryModel.GetSearchParameters()
for searchParamCodeWithModifier, searchParamCodeValueOrValuesWithPrefix := range query.Where {
searchParameter, err := ProcessSearchParameter(searchParamCodeWithModifier, searchCodeToTypeLookup)
if err != nil {
return nil, err
}
searchParameterValueOperatorTree, err := ProcessSearchParameterValueIntoOperatorTree(searchParameter, searchParamCodeValueOrValuesWithPrefix)
if err != nil {
return nil, err
}
for ndxANDlevel, searchParameterValueOperatorAND := range searchParameterValueOperatorTree {
whereORClauses := []string{}
for ndxORlevel, searchParameterValueOperatorOR := range searchParameterValueOperatorAND {
whereORClause, clauseNamedParameters, err := SearchCodeToWhereClause(searchParameter, searchParameterValueOperatorOR, fmt.Sprintf("%d_%d", ndxANDlevel, ndxORlevel))
if err != nil {
return nil, err
}
//add generated where clause to the list, and add the named parameters to the map of existing named parameters
whereORClauses = append(whereORClauses, whereORClause)
maps.Copy(whereNamedParameters, clauseNamedParameters)
}
whereClauses = append(whereClauses, fmt.Sprintf("(%s)", strings.Join(whereORClauses, " OR ")))
}
fromClause, err := SearchCodeToFromClause(searchParameter)
if err != nil {
return nil, err
}
if len(fromClause) > 0 {
fromClauses = append(fromClauses, fromClause)
}
}
//SECURITY: for safety, we will always add/override the current user_id in the where clause. This is to ensure that the user doesn't attempt to override this value in their own where clause
currentUser, currentUserErr := sr.GetCurrentUser(ctx)
if currentUserErr != nil {
return nil, currentUserErr
}
whereNamedParameters["user_id"] = currentUser.ID.String()
whereClauses = append(whereClauses, "(user_id = @user_id)")
//defaults
selectClauses := []string{fmt.Sprintf("%s.*", TABLE_ALIAS)}
groupClause := fmt.Sprintf("%s.id", TABLE_ALIAS)
orderClause := fmt.Sprintf("%s.sort_date DESC", TABLE_ALIAS)
if query.Aggregations != nil {
//Handle Aggregations
if query.Aggregations.CountBy != nil {
//populate the group by and order by clause with the count by values
query.Aggregations.OrderBy = &models.QueryResourceAggregation{
Field: "*",
Function: "count",
}
query.Aggregations.GroupBy = query.Aggregations.CountBy
if query.Aggregations.GroupBy.Field == "*" {
//we need to get the count of all resources, so we need to remove the group by clause and replace it by
// `source_resource_type` which will be the same for all resources
query.Aggregations.GroupBy.Field = "source_resource_type"
}
}
//process order by clause
if query.Aggregations.OrderBy != nil {
orderAsc := true //default to ascending, switch to desc if parameter is a date type.
if query.Aggregations.OrderBy.Field != "*" {
orderAggregationParam, err := ProcessAggregationParameter(*query.Aggregations.OrderBy, searchCodeToTypeLookup)
if err != nil {
return nil, err
}
orderAggregationFromClause, err := SearchCodeToFromClause(orderAggregationParam.SearchParameter)
if err != nil {
return nil, err
}
fromClauses = append(fromClauses, orderAggregationFromClause)
//if the order by is a date type, we need to order by DESC (most recent first)
if orderAggregationParam.Type == SearchParameterTypeDate {
orderAsc = false
}
orderClause = AggregationParameterToClause(orderAggregationParam)
if orderAsc {
orderClause = fmt.Sprintf("%s ASC", orderClause)
} else {
orderClause = fmt.Sprintf("%s DESC", orderClause)
}
} else {
orderClause = fmt.Sprintf("%s(%s) DESC", query.Aggregations.OrderBy.Function, query.Aggregations.OrderBy.Field)
}
}
//process group by clause
if query.Aggregations.GroupBy != nil {
groupAggregationParam, err := ProcessAggregationParameter(*query.Aggregations.GroupBy, searchCodeToTypeLookup)
if err != nil {
return nil, err
}
groupAggregationFromClause, err := SearchCodeToFromClause(groupAggregationParam.SearchParameter)
if err != nil {
return nil, err
}
fromClauses = append(fromClauses, groupAggregationFromClause)
groupClause = AggregationParameterToClause(groupAggregationParam)
selectClauses = []string{
fmt.Sprintf("%s as %s", groupClause, "label"),
}
if query.Aggregations.OrderBy == nil || query.Aggregations.OrderBy.Field == "*" {
selectClauses = append(selectClauses, fmt.Sprintf("%s as %s", "count(*)", "value"))
orderClause = fmt.Sprintf("%s DESC", "count(*)")
} else {
//use the orderBy aggregation as the value
orderAggregationParam, err := ProcessAggregationParameter(*query.Aggregations.OrderBy, searchCodeToTypeLookup)
if err != nil {
return nil, err
}
orderSelectClause := AggregationParameterToClause(orderAggregationParam)
selectClauses = append(selectClauses, fmt.Sprintf("%s as %s", orderSelectClause, "value"))
}
}
}
//ensure Where and From clauses are unique
whereClauses = lo.Uniq(whereClauses)
whereClauses = lo.Compact(whereClauses)
fromClauses = lo.Uniq(fromClauses)
fromClauses = lo.Compact(fromClauses)
sqlQuery := sr.GormClient.WithContext(ctx).
Select(strings.Join(selectClauses, ", ")).
Where(strings.Join(whereClauses, " AND "), whereNamedParameters).
Group(groupClause).
Order(orderClause).
Table(strings.Join(fromClauses, ", "))
//add limit and offset clauses if present
if query.Limit != nil {
sqlQuery = sqlQuery.Limit(*query.Limit)
}
if query.Offset != nil {
sqlQuery = sqlQuery.Offset(*query.Offset)
}
return sqlQuery, nil
}
/// INTERNAL functionality. These functions are exported for testing, but are not available in the Interface
//TODO: don't export these, instead use casting to convert the interface to the SqliteRepository struct, then call these functions directly
type SearchParameter struct {
Name string
Type SearchParameterType
Modifier string
}
type AggregationParameter struct {
SearchParameter
Function string //count, sum, avg, min, max, etc
}
// Lists in the SearchParameterValueOperatorTree are AND'd together, and items within each SearchParameterValueOperatorTree list are OR'd together
// For example, the following would be AND'd together, and then OR'd with the next SearchParameterValueOperatorTree
//
// {
// {SearchParameterValue{Value: "foo"}, SearchParameterValue{Value: "bar"}}
// {SearchParameterValue{Value: "baz"}},
// }
//
// This would result in the following SQL:
//
// (value = "foo" OR value = "bar") AND (value = "baz")
type SearchParameterValueOperatorTree [][]SearchParameterValue
type SearchParameterValue struct {
Prefix string
Value interface{}
SecondaryValues map[string]interface{}
}
// SearchParameters are made up of parameter names and modifiers. For example, "name" and "name:exact" are both valid search parameters
// This function will parse the searchCodeWithModifier and return the SearchParameter
func ProcessSearchParameter(searchCodeWithModifier string, searchParamTypeLookup map[string]string) (SearchParameter, error) {
searchParameter := SearchParameter{}
//determine the searchCode and searchCodeModifier
//TODO: this is only applicable to string, token, reference and uri type (however unknown names & modifiers are ignored)
if searchCodeParts := strings.SplitN(searchCodeWithModifier, ":", 2); len(searchCodeParts) == 2 {
searchParameter.Name = searchCodeParts[0]
searchParameter.Modifier = searchCodeParts[1]
} else {
searchParameter.Name = searchCodeParts[0]
searchParameter.Modifier = ""
}
//next, determine the searchCodeType for this Resource (or throw an error if it is unknown)
searchParamTypeStr, searchParamTypeOk := searchParamTypeLookup[searchParameter.Name]
if !searchParamTypeOk {
return searchParameter, fmt.Errorf("unknown search parameter: %s", searchParameter.Name)
} else {
searchParameter.Type = SearchParameterType(searchParamTypeStr)
}
//if this is a token search parameter with a modifier, we need to throw an error
if searchParameter.Type == SearchParameterTypeToken && len(searchParameter.Modifier) > 0 {
return searchParameter, fmt.Errorf("token search parameter %s cannot have a modifier", searchParameter.Name)
}
return searchParameter, nil
}
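// Hedged example (not part of the original file): given a lookup that maps "name" to the "string"
// search parameter type, anything after the colon is split off into the Modifier:
//
//	sp, err := ProcessSearchParameter("name:exact", map[string]string{"name": "string"})
//	// err == nil, sp.Name == "name", sp.Modifier == "exact", sp.Type == SearchParameterTypeString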
// ProcessSearchParameterValueIntoOperatorTree searchParamCodeValueOrValuesWithPrefix may be a single string, or a list of strings
// each string may itself be a concatenation of multiple values, separated by a comma
// so we need to do three stages of processing:
// 1. split the searchParamCodeValueOrValuesWithPrefix into a list of strings
// 2. split each string into a list of values
// 3. use the ProcessSearchParameterValue function to split each value into a list of prefixes and values
// these are then stored in a multidimensional list of SearchParameterValueOperatorTree
// top level is AND'd together, and each item within the lists are OR'd together
//
// For example, searchParamCodeValueOrValuesWithPrefix may be:
//
// "code": "29463-7,3141-9,27113001"
// "code": ["le29463-7", "gt3141-9", "27113001"]
func ProcessSearchParameterValueIntoOperatorTree(searchParameter SearchParameter, searchParamCodeValueOrValuesWithPrefix interface{}) (SearchParameterValueOperatorTree, error) {
searchParamCodeValuesWithPrefix := []string{}
switch v := searchParamCodeValueOrValuesWithPrefix.(type) {
case string:
searchParamCodeValuesWithPrefix = append(searchParamCodeValuesWithPrefix, v)
break
case []string:
searchParamCodeValuesWithPrefix = v
break
default:
return nil, fmt.Errorf("invalid search parameter value type %T, must be a string or a list of strings (%s=%v)", v, searchParameter.Name, searchParamCodeValueOrValuesWithPrefix)
}
//generate a SearchParameterValueOperatorTree, because we may have multiple OR and AND operators for the same search parameter.
//ie, (code = "foo" OR code = "bar") AND (code = "baz")
searchParamCodeValueOperatorTree := SearchParameterValueOperatorTree{}
//loop through each searchParamCodeValueWithPrefix, and split it into a list of values (comma separated)
for _, searchParamCodeValuesInANDClause := range searchParamCodeValuesWithPrefix {
searchParameterValuesOperatorOR := []SearchParameterValue{}
for _, searchParamCodeValueInORClause := range strings.Split(searchParamCodeValuesInANDClause, ",") {
searchParameterValue, err := ProcessSearchParameterValue(searchParameter, searchParamCodeValueInORClause)
if err != nil {
return nil, err
}
searchParameterValuesOperatorOR = append(searchParameterValuesOperatorOR, searchParameterValue)
}
searchParamCodeValueOperatorTree = append(searchParamCodeValueOperatorTree, searchParameterValuesOperatorOR)
}
return searchParamCodeValueOperatorTree, nil
}
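// Hedged example (not part of the original file): a list value produces one AND level per entry,
// and comma-separated entries within an entry are OR'd together:
//
//	tree, err := ProcessSearchParameterValueIntoOperatorTree(
//		SearchParameter{Name: "code", Type: SearchParameterTypeToken},
//		[]string{"29463-7,3141-9", "27113001"},
//	)
//	// err == nil, and tree roughly corresponds to:
//	// (code = "29463-7" OR code = "3141-9") AND (code = "27113001")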
// ProcessSearchParameterValue searchValueWithPrefix may or may not have a prefix which needs to be parsed
// this function will parse the searchValueWithPrefix and return the SearchParameterValue
// for example, "eq2018-01-01" would return a SearchParameterValue with a prefix of "eq" and a value of "2018-01-01"
// and "2018-01-01" would return a SearchParameterValue with a value of "2018-01-01"
//
// some query types, like token, quantity and reference, have secondary values that need to be parsed
// for example, code="http://loinc.org|29463-7" would return a SearchParameterValue with a value of "29463-7" and a secondary value of { "codeSystem": "http://loinc.org" }
func ProcessSearchParameterValue(searchParameter SearchParameter, searchValueWithPrefix string) (SearchParameterValue, error) {
searchParameterValue := SearchParameterValue{
SecondaryValues: map[string]interface{}{},
Value: searchValueWithPrefix,
}
if (searchParameter.Type == SearchParameterTypeString || searchParameter.Type == SearchParameterTypeUri || searchParameter.Type == SearchParameterTypeKeyword) && len(searchParameterValue.Value.(string)) == 0 {
return searchParameterValue, fmt.Errorf("invalid search parameter value: (%s=%s)", searchParameter.Name, searchParameterValue.Value)
}
//certain types (like number, date and quantity) have a prefix that needs to be parsed
if searchParameter.Type == SearchParameterTypeNumber || searchParameter.Type == SearchParameterTypeDate || searchParameter.Type == SearchParameterTypeQuantity {
//loop though all known/allowed prefixes, and determine if the searchValueWithPrefix starts with one of them
allowedPrefixes := []string{"eq", "ne", "gt", "lt", "ge", "le", "sa", "eb", "ap"}
for _, allowedPrefix := range allowedPrefixes {
if strings.HasPrefix(searchValueWithPrefix, allowedPrefix) {
searchParameterValue.Prefix = allowedPrefix
searchParameterValue.Value = strings.TrimPrefix(searchValueWithPrefix, allowedPrefix)
break
}
}
}
//certain types (like token, quantity, reference) have secondary query values that need to be parsed (delimited by "|")
if searchParameter.Type == SearchParameterTypeQuantity {
if searchParameterValueParts := strings.SplitN(searchParameterValue.Value.(string), "|", 3); len(searchParameterValueParts) == 1 {
searchParameterValue.Value = searchParameterValueParts[0]
} else if len(searchParameterValueParts) == 2 {
searchParameterValue.Value = searchParameterValueParts[0]
if len(searchParameterValueParts[1]) > 0 {
searchParameterValue.SecondaryValues[searchParameter.Name+"System"] = searchParameterValueParts[1]
}
} else if len(searchParameterValueParts) == 3 {
searchParameterValue.Value = searchParameterValueParts[0]
if len(searchParameterValueParts[1]) > 0 {
searchParameterValue.SecondaryValues[searchParameter.Name+"System"] = searchParameterValueParts[1]
}
if len(searchParameterValueParts[2]) > 0 {
searchParameterValue.SecondaryValues[searchParameter.Name+"Code"] = searchParameterValueParts[2]
}
}
} else if searchParameter.Type == SearchParameterTypeToken {
if searchParameterValueParts := strings.SplitN(searchParameterValue.Value.(string), "|", 2); len(searchParameterValueParts) == 1 {
searchParameterValue.Value = searchParameterValueParts[0] //this is a code
if len(searchParameterValue.Value.(string)) == 0 {
return searchParameterValue, fmt.Errorf("invalid search parameter value: (%s=%s)", searchParameter.Name, searchParameterValue.Value)
}
} else if len(searchParameterValueParts) == 2 {
//if there are 2 parts, the first is always the system and the second is always the code. Either one may be empty. If both are empty, this is invalid.
searchParameterValue.SecondaryValues[searchParameter.Name+"System"] = searchParameterValueParts[0]
searchParameterValue.Value = searchParameterValueParts[1]
if len(searchParameterValueParts[0]) == 0 && len(searchParameterValueParts[1]) == 0 {
return searchParameterValue, fmt.Errorf("invalid search parameter value: (%s=%s)", searchParameter.Name, searchParameterValue.Value)
}
}
} else if searchParameter.Type == SearchParameterTypeReference {
//todo
return searchParameterValue, fmt.Errorf("search parameter type not yet implemented: %s", searchParameter.Type)
}
//certain types (Quantity and Number) need to be converted to Float64
if searchParameter.Type == SearchParameterTypeQuantity || searchParameter.Type == SearchParameterTypeNumber {
if conv, err := strconv.ParseFloat(searchParameterValue.Value.(string), 64); err == nil {
searchParameterValue.Value = conv
} else {
return searchParameterValue, fmt.Errorf("invalid search parameter value (NaN): (%s=%s)", searchParameter.Name, searchParameterValue.Value)
}
} else if searchParameter.Type == SearchParameterTypeDate {
//other types (like date) need to be converted to a time.Time
if conv, err := time.Parse(time.RFC3339, searchParameterValue.Value.(string)); err == nil {
searchParameterValue.Value = conv
} else {
// fallback to parsing just a date (without time)
if conv, err := time.Parse("2006-01-02", searchParameterValue.Value.(string)); err == nil {
searchParameterValue.Value = conv
} else {
return searchParameterValue, fmt.Errorf("invalid search parameter value (invalid date): (%s=%s)", searchParameter.Name, searchParameterValue.Value)
}
}
}
return searchParameterValue, nil
}
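// Hedged example (not part of the original file): a date value with a prefix is split into the
// prefix and a parsed time.Time (falling back to date-only parsing):
//
//	v, err := ProcessSearchParameterValue(
//		SearchParameter{Name: "date", Type: SearchParameterTypeDate},
//		"ge2018-01-01",
//	)
//	// err == nil, v.Prefix == "ge", v.Value == time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC)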
func NamedParameterWithSuffix(parameterName string, suffix string) string {
return fmt.Sprintf("%s_%s", parameterName, suffix)
}
// SearchCodeToWhereClause converts a searchCode and searchCodeValue to a where clause and a map of named parameters
func SearchCodeToWhereClause(searchParam SearchParameter, searchParamValue SearchParameterValue, namedParameterSuffix string) (string, map[string]interface{}, error) {
//add named parameters to the lookup map. Basically, this is a map of all the named parameters that will be used in the where clause we're generating
searchClauseNamedParams := map[string]interface{}{
NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix): searchParamValue.Value,
}
for k, v := range searchParamValue.SecondaryValues {
searchClauseNamedParams[NamedParameterWithSuffix(k, namedParameterSuffix)] = v
}
//parse the searchCode and searchCodeValue to determine the correct where clause
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//SIMPLE SEARCH PARAMETERS
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
switch searchParam.Type {
case SearchParameterTypeNumber, SearchParameterTypeDate:
if searchParamValue.Prefix == "" || searchParamValue.Prefix == "eq" {
return fmt.Sprintf("(%s = @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil
} else if searchParamValue.Prefix == "lt" || searchParamValue.Prefix == "eb" {
return fmt.Sprintf("(%s < @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil
} else if searchParamValue.Prefix == "le" {
return fmt.Sprintf("(%s <= @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil
} else if searchParamValue.Prefix == "gt" || searchParamValue.Prefix == "sa" {
return fmt.Sprintf("(%s > @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil
} else if searchParamValue.Prefix == "ge" {
return fmt.Sprintf("(%s >= @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil
} else if searchParamValue.Prefix == "ne" {
return fmt.Sprintf("(%s <> @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil
} else if searchParam.Modifier == "ap" {
return "", nil, fmt.Errorf("search modifier 'ap' not supported for search parameter type %s (%s=%s)", searchParam.Type, searchParam.Name, searchParamValue.Value)
}
case SearchParameterTypeUri:
if searchParam.Modifier == "" {
return fmt.Sprintf("(%s = @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil
} else if searchParam.Modifier == "below" {
searchClauseNamedParams[NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)] = searchParamValue.Value.(string) + "%" // column starts with "http://example.com"
return fmt.Sprintf("(%s LIKE @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil
} else if searchParam.Modifier == "above" {
return "", nil, fmt.Errorf("search modifier 'above' not supported for search parameter type %s (%s=%s)", searchParam.Type, searchParam.Name, searchParamValue.Value)
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//COMPLEX SEARCH PARAMETERS
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
case SearchParameterTypeString:
if searchParam.Modifier == "" {
searchClauseNamedParams[NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)] = searchParamValue.Value.(string) + "%" // "eve" matches "Eve" and "Evelyn"
return fmt.Sprintf("(%sJson.value LIKE @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil
} else if searchParam.Modifier == "exact" {
// "eve" matches "eve" (not "Eve" or "EVE")
return fmt.Sprintf("(%sJson.value = @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil
} else if searchParam.Modifier == "contains" {
searchClauseNamedParams[NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)] = "%" + searchParamValue.Value.(string) + "%" // "eve" matches "Eve", "Evelyn" and "Severine"
return fmt.Sprintf("(%sJson.value LIKE @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil
}
case SearchParameterTypeQuantity:
//setup the clause
var clause string
if searchParamValue.Prefix == "" || searchParamValue.Prefix == "eq" {
//TODO: when no prefix is specified, we need to search using BETWEEN (+/- 0.05)
clause = fmt.Sprintf("%sJson.value ->> '$.value' = @%s", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix))
} else if searchParamValue.Prefix == "lt" || searchParamValue.Prefix == "eb" {
clause = fmt.Sprintf("%sJson.value ->> '$.value' < @%s", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix))
} else if searchParamValue.Prefix == "le" {
clause = fmt.Sprintf("%sJson.value ->> '$.value' <= @%s", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix))
} else if searchParamValue.Prefix == "gt" || searchParamValue.Prefix == "sa" {
clause = fmt.Sprintf("%sJson.value ->> '$.value' > @%s", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix))
} else if searchParamValue.Prefix == "ge" {
clause = fmt.Sprintf("%sJson.value ->> '$.value' >= @%s", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix))
} else if searchParamValue.Prefix == "ne" {
clause = fmt.Sprintf("%sJson.value ->> '$.value' <> @%s", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix))
} else if searchParamValue.Prefix == "ap" {
return "", nil, fmt.Errorf("search modifier 'ap' not supported for search parameter type %s (%s=%s)", searchParam.Type, searchParam.Name, searchParamValue.Value)
}
//append the code and/or system clauses (if required)
//this looks like unnecessary code, however it's required to ensure consistent tests
allowedSecondaryKeys := []string{"code", "system"}
for _, k := range allowedSecondaryKeys {
namedParameterKey := fmt.Sprintf("%s%s", searchParam.Name, strings.Title(k))
if _, ok := searchParamValue.SecondaryValues[namedParameterKey]; ok {
clause += fmt.Sprintf(` AND %sJson.value ->> '$.%s' = @%s`, searchParam.Name, k, NamedParameterWithSuffix(namedParameterKey, namedParameterSuffix))
}
}
return fmt.Sprintf("(%s)", clause), searchClauseNamedParams, nil
case SearchParameterTypeToken:
//unfortunately we don't know the datatype of this token, however we've already preprocessed this field in backend/pkg/models/database/generate.go
// all of the following datatypes will be stored in a JSON object with the following structure:
// {
// "system": "http://example.com",
// "code": "example-code",
// "text": "example display"
// }
// primitive datatypes will not have a system or text, just a code (e.g. "code": true or "code": "http://www.example.com")
//
// - Coding - https://hl7.org/fhir/r4/datatypes.html#Coding
// - Identifier - https://hl7.org/fhir/r4/datatypes.html#Identifier
// - ContactPoint - https://hl7.org/fhir/r4/datatypes.html#ContactPoint
// - CodeableConcept - https://hl7.org/fhir/r4/datatypes.html#CodeableConcept
// - code - https://hl7.org/fhir/r4/datatypes.html#code
// - boolean - https://hl7.org/fhir/r4/datatypes.html#boolean
// - uri - https://hl7.org/fhir/r4/datatypes.html#uri
// - string - https://hl7.org/fhir/r4/datatypes.html#string
//TODO: support ":text" modifier
//setup the clause
clause := []string{}
if searchParamValue.Value.(string) != "" {
clause = append(clause, fmt.Sprintf("%sJson.value ->> '$.code' = @%s", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)))
}
//append the code and/or system clauses (if required)
//this looks like unnecessary code, however it's required to ensure consistent tests
allowedSecondaryKeys := []string{"system"}
for _, k := range allowedSecondaryKeys {
namedParameterKey := fmt.Sprintf("%s%s", searchParam.Name, strings.Title(k))
if _, ok := searchParamValue.SecondaryValues[namedParameterKey]; ok {
clause = append(clause, fmt.Sprintf(`%sJson.value ->> '$.%s' = @%s`, searchParam.Name, k, NamedParameterWithSuffix(namedParameterKey, namedParameterSuffix)))
}
}
return fmt.Sprintf("(%s)", strings.Join(clause, " AND ")), searchClauseNamedParams, nil
case SearchParameterTypeKeyword:
//setup the clause
return fmt.Sprintf("(%s = @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil
case SearchParameterTypeReference:
return "", nil, fmt.Errorf("search parameter type %s not supported", searchParam.Type)
}
return "", searchClauseNamedParams, nil
}
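// Hedged example (not part of the original file): for a token parameter "code" carrying a
// system|code value (the "codeSystem" secondary key is produced by ProcessSearchParameterValue),
// the generated clause matches on both the code and the system:
//
//	where, params, err := SearchCodeToWhereClause(
//		SearchParameter{Name: "code", Type: SearchParameterTypeToken},
//		SearchParameterValue{Value: "29463-7", SecondaryValues: map[string]interface{}{"codeSystem": "http://loinc.org"}},
//		"0_0",
//	)
//	// err == nil
//	// where == "(codeJson.value ->> '$.code' = @code_0_0 AND codeJson.value ->> '$.system' = @codeSystem_0_0)"
//	// params == map[string]interface{}{"code_0_0": "29463-7", "codeSystem_0_0": "http://loinc.org"}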
func SearchCodeToFromClause(searchParam SearchParameter) (string, error) {
//complex search parameters (e.g. token, reference, quantities, special) require the use of `json_*` FROM clauses
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//COMPLEX SEARCH PARAMETERS
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
switch searchParam.Type {
case SearchParameterTypeQuantity, SearchParameterTypeToken, SearchParameterTypeString:
//setup the clause
return fmt.Sprintf("json_each(%s.%s) as %sJson", TABLE_ALIAS, searchParam.Name, searchParam.Name), nil
}
return "", nil
}
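// Hedged example (not part of the original file): complex parameters add a json_each() table to
// the FROM clause, aliased after the parameter name:
//
//	from, _ := SearchCodeToFromClause(SearchParameter{Name: "code", Type: SearchParameterTypeToken})
//	// from == "json_each(fhir.code) as codeJson"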
func AggregationParameterToClause(aggParameter AggregationParameter) string {
var clause string
switch aggParameter.Type {
case SearchParameterTypeQuantity, SearchParameterTypeString:
//setup the clause
clause = fmt.Sprintf("(%sJson.value ->> '$.%s')", aggParameter.Name, aggParameter.Modifier)
case SearchParameterTypeToken:
//modifier is optional for token types.
if aggParameter.Modifier != "" {
clause = fmt.Sprintf("(%sJson.value ->> '$.%s')", aggParameter.Name, aggParameter.Modifier)
} else {
//if no modifier is specified, use the system and code to generate the clause
//((codeJson.value ->> '$.system') || '|' || (codeJson.value ->> '$.code'))
clause = fmt.Sprintf("((%sJson.value ->> '$.system') || '|' || (%sJson.value ->> '$.code'))", aggParameter.Name, aggParameter.Name)
}
default:
clause = fmt.Sprintf("%s.%s", TABLE_ALIAS, aggParameter.Name)
}
if len(aggParameter.Function) > 0 {
clause = fmt.Sprintf("%s(%s)", aggParameter.Function, clause)
}
return clause
}
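// Hedged example (not part of the original file): a token aggregation without a property groups on
// "system|code", while one with a property extracts just that JSON field (wrapped in the function):
//
//	AggregationParameterToClause(AggregationParameter{SearchParameter: SearchParameter{Name: "code", Type: SearchParameterTypeToken}})
//	// => "((codeJson.value ->> '$.system') || '|' || (codeJson.value ->> '$.code'))"
//	AggregationParameterToClause(AggregationParameter{SearchParameter: SearchParameter{Name: "code", Type: SearchParameterTypeToken, Modifier: "code"}, Function: "count"})
//	// => "count((codeJson.value ->> '$.code'))"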
// ProcessAggregationParameter processes the aggregation parameters which are fields with optional properties:
// Fields that are primitive types (number, uri) must not have any property specified:
// eg. `probability`
//
// Fields that are complex types (token, quantity) must have a property specified:
// eg. `identifier:code`
//
// if a property is specified, it's set as the modifier, and used when generating the SQL query groupBy, orderBy, etc. clauses
func ProcessAggregationParameter(aggregationFieldWithFn models.QueryResourceAggregation, searchParamTypeLookup map[string]string) (AggregationParameter, error) {
aggregationParameter := AggregationParameter{
SearchParameter: SearchParameter{},
Function: aggregationFieldWithFn.Function,
}
//determine the searchCode and searchCodeModifier
//TODO: this is only applicable to string, token, reference and uri type (however unknown names & modifiers are ignored)
if aggregationFieldParts := strings.SplitN(aggregationFieldWithFn.Field, ":", 2); len(aggregationFieldParts) == 2 {
aggregationParameter.Name = aggregationFieldParts[0]
aggregationParameter.Modifier = aggregationFieldParts[1]
} else {
aggregationParameter.Name = aggregationFieldParts[0]
aggregationParameter.Modifier = ""
}
//next, determine the searchCodeType for this Resource (or throw an error if it is unknown)
searchParamTypeStr, searchParamTypeOk := searchParamTypeLookup[aggregationParameter.Name]
if !searchParamTypeOk {
return aggregationParameter, fmt.Errorf("unknown search parameter in aggregation: %s", aggregationParameter.Name)
} else {
aggregationParameter.Type = SearchParameterType(searchParamTypeStr)
}
//primitive types must not have a modifier; if one is present, we need to throw an error
if aggregationParameter.Type == SearchParameterTypeNumber || aggregationParameter.Type == SearchParameterTypeUri || aggregationParameter.Type == SearchParameterTypeKeyword || aggregationParameter.Type == SearchParameterTypeDate {
if len(aggregationParameter.Modifier) > 0 {
return aggregationParameter, fmt.Errorf("primitive aggregation parameter %s cannot have a property (%s)", aggregationParameter.Name, aggregationParameter.Modifier)
}
} else if aggregationParameter.Type == SearchParameterTypeToken {
//modifier is optional for token types
} else {
//complex types must have a modifier
if len(aggregationParameter.Modifier) == 0 {
return aggregationParameter, fmt.Errorf("complex aggregation parameter %s must have a property", aggregationParameter.Name)
}
}
return aggregationParameter, nil
}
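// Hedged example (not part of the original file): the "field:property" syntax sets the property as
// the modifier, which is optional for token types:
//
//	ap, err := ProcessAggregationParameter(
//		models.QueryResourceAggregation{Field: "code:system", Function: "count"},
//		map[string]string{"code": "token"},
//	)
//	// err == nil, ap.Name == "code", ap.Modifier == "system", ap.Function == "count"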

View File

@@ -1,518 +0,0 @@
package database
import (
"context"
"fmt"
"github.com/fastenhealth/fasten-onprem/backend/pkg"
mock_config "github.com/fastenhealth/fasten-onprem/backend/pkg/config/mock"
"github.com/fastenhealth/fasten-onprem/backend/pkg/event_bus"
"github.com/fastenhealth/fasten-onprem/backend/pkg/models"
"github.com/golang/mock/gomock"
"github.com/sirupsen/logrus"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"gorm.io/gorm"
"io/ioutil"
"log"
"os"
"strings"
"testing"
)
// Define the suite, and absorb the built-in basic suite
// functionality from testify - including a T() method which
// returns the current testing context
type RepositorySqlTestSuite struct {
suite.Suite
MockCtrl *gomock.Controller
TestDatabase *os.File
TestRepository DatabaseRepository
}
// BeforeTest has a function to be executed right before the test starts and receives the suite and test names as input
func (suite *RepositorySqlTestSuite) BeforeTest(suiteName, testName string) {
suite.MockCtrl = gomock.NewController(suite.T())
dbFile, err := ioutil.TempFile("", fmt.Sprintf("%s.*.db", testName))
if err != nil {
log.Fatal(err)
}
suite.TestDatabase = dbFile
fakeConfig := mock_config.NewMockInterface(suite.MockCtrl)
fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes()
fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes()
fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes()
dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer())
require.NoError(suite.T(), err)
suite.TestRepository = dbRepo
userModel := &models.User{
Username: "test_username",
Password: "testpassword",
Email: "test@test.com",
}
err = suite.TestRepository.CreateUser(context.Background(), userModel)
require.NoError(suite.T(), err)
}
// AfterTest has a function to be executed right after the test finishes and receives the suite and test names as input
func (suite *RepositorySqlTestSuite) AfterTest(suiteName, testName string) {
suite.MockCtrl.Finish()
os.Remove(suite.TestDatabase.Name())
}
// In order for 'go test' to run this suite, we need to create
// a normal test function and pass our suite to suite.Run
func TestRepositorySqlTestSuite(t *testing.T) {
suite.Run(t, new(RepositorySqlTestSuite))
}
func (suite *RepositorySqlTestSuite) TestQueryResources_SQL() {
//setup
sqliteRepo := suite.TestRepository.(*SqliteRepository)
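//gorm dry-run mode builds the SQL statement without executing it, so these tests can assert on the generated query text and bound parameters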
sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true})
//test
authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")
sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{
Select: []string{},
Where: map[string]interface{}{
"code": "test_code",
},
From: "Observation",
})
require.NoError(suite.T(), err)
var results []map[string]interface{}
statement := sqlQuery.Find(&results).Statement
sqlString := statement.SQL.String()
sqlParams := statement.Vars
//assert
require.NoError(suite.T(), err)
require.Equal(suite.T(),
strings.Join([]string{
"SELECT fhir.*",
"FROM fhir_observation as fhir, json_each(fhir.code) as codeJson",
"WHERE ((codeJson.value ->> '$.code' = ?)) AND (user_id = ?)",
"GROUP BY `fhir`.`id`",
"ORDER BY fhir.sort_date DESC",
}, " "),
sqlString)
require.Equal(suite.T(), sqlParams, []interface{}{
"test_code", "00000000-0000-0000-0000-000000000000",
})
}
func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithMultipleWhereConditions() {
//setup
sqliteRepo := suite.TestRepository.(*SqliteRepository)
sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true})
//test
authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")
sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{
Select: []string{},
Where: map[string]interface{}{
"code": "test_code",
"category": "12345",
},
From: "Observation",
})
require.NoError(suite.T(), err)
var results []map[string]interface{}
statement := sqlQuery.Find(&results).Statement
sqlString := statement.SQL.String()
sqlParams := statement.Vars
//assert
require.NoError(suite.T(), err)
require.Equal(suite.T(),
strings.Join([]string{
"SELECT fhir.*",
"FROM fhir_observation as fhir, json_each(fhir.code) as codeJson, json_each(fhir.category) as categoryJson",
"WHERE ((codeJson.value ->> '$.code' = ?)) AND ((categoryJson.value ->> '$.code' = ?)) AND (user_id = ?)",
"GROUP BY `fhir`.`id`",
"ORDER BY fhir.sort_date DESC",
}, " "),
sqlString)
require.Equal(suite.T(), sqlParams, []interface{}{
"test_code", "12345", "00000000-0000-0000-0000-000000000000",
})
}
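//ordering by a primitive (uri) search parameter sorts directly on the column, ascending by default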
func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithPrimitiveOrderByAggregation() {
//setup
sqliteRepo := suite.TestRepository.(*SqliteRepository)
sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true})
//test
authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")
sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{
Select: []string{},
Where: map[string]interface{}{
"activityCode": "test_code",
},
From: "CarePlan",
Aggregations: &models.QueryResourceAggregations{OrderBy: &models.QueryResourceAggregation{Field: "instantiatesUri"}},
})
require.NoError(suite.T(), err)
var results []map[string]interface{}
statement := sqlQuery.Find(&results).Statement
sqlString := statement.SQL.String()
sqlParams := statement.Vars
//assert
require.NoError(suite.T(), err)
require.Equal(suite.T(),
strings.Join([]string{
"SELECT fhir.*",
"FROM fhir_care_plan as fhir, json_each(fhir.activityCode) as activityCodeJson",
"WHERE ((activityCodeJson.value ->> '$.code' = ?)) AND (user_id = ?)",
"GROUP BY `fhir`.`id`",
"ORDER BY fhir.instantiatesUri ASC",
}, " "), sqlString)
require.Equal(suite.T(), sqlParams, []interface{}{
"test_code", "00000000-0000-0000-0000-000000000000",
})
}
func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithKeywordOrderByAggregation() {
//setup
sqliteRepo := suite.TestRepository.(*SqliteRepository)
sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true})
//test
authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")
sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{
Select: []string{},
Where: map[string]interface{}{},
From: "CarePlan",
Aggregations: &models.QueryResourceAggregations{OrderBy: &models.QueryResourceAggregation{Field: "id"}},
})
require.NoError(suite.T(), err)
var results []map[string]interface{}
statement := sqlQuery.Find(&results).Statement
sqlString := statement.SQL.String()
sqlParams := statement.Vars
//assert
require.NoError(suite.T(), err)
require.Equal(suite.T(),
strings.Join([]string{
"SELECT fhir.*",
"FROM fhir_care_plan as fhir",
"WHERE (user_id = ?)",
"GROUP BY `fhir`.`id`",
"ORDER BY fhir.id ASC",
}, " "), sqlString)
require.Equal(suite.T(), sqlParams, []interface{}{
"00000000-0000-0000-0000-000000000000",
})
}
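//ordering by a complex search parameter with a modifier (valueString:value) sorts on the JSON-extracted value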
func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithComplexOrderByAggregation() {
//setup
sqliteRepo := suite.TestRepository.(*SqliteRepository)
sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true})
//test
authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")
sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{
Select: []string{},
Where: map[string]interface{}{
"code": "test_code",
},
From: "Observation",
Aggregations: &models.QueryResourceAggregations{OrderBy: &models.QueryResourceAggregation{Field: "valueString:value"}},
})
require.NoError(suite.T(), err)
var results []map[string]interface{}
statement := sqlQuery.Find(&results).Statement
sqlString := statement.SQL.String()
sqlParams := statement.Vars
//assert
require.NoError(suite.T(), err)
require.Equal(suite.T(),
strings.Join([]string{
"SELECT fhir.*",
"FROM fhir_observation as fhir, json_each(fhir.code) as codeJson, json_each(fhir.valueString) as valueStringJson",
"WHERE ((codeJson.value ->> '$.code' = ?)) AND (user_id = ?)",
"GROUP BY `fhir`.`id`",
"ORDER BY (valueStringJson.value ->> '$.value') ASC",
}, " "), sqlString)
require.Equal(suite.T(), sqlParams, []interface{}{
"test_code", "00000000-0000-0000-0000-000000000000",
})
}
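//count-by on a primitive (uri) field groups and counts directly on the column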
func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithPrimitiveCountByAggregation() {
//setup
sqliteRepo := suite.TestRepository.(*SqliteRepository)
sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true})
//test
authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")
sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{
Select: []string{},
Where: map[string]interface{}{
"activityCode": "test_code",
},
From: "CarePlan",
Aggregations: &models.QueryResourceAggregations{CountBy: &models.QueryResourceAggregation{Field: "instantiatesUri"}},
})
require.NoError(suite.T(), err)
var results []map[string]interface{}
statement := sqlQuery.Find(&results).Statement
sqlString := statement.SQL.String()
sqlParams := statement.Vars
//assert
require.NoError(suite.T(), err)
require.Equal(suite.T(),
strings.Join([]string{
"SELECT fhir.instantiatesUri as label, count(*) as value",
"FROM fhir_care_plan as fhir, json_each(fhir.activityCode) as activityCodeJson",
"WHERE ((activityCodeJson.value ->> '$.code' = ?)) AND (user_id = ?)",
"GROUP BY `fhir`.`instantiatesUri`",
"ORDER BY count(*) DESC",
}, " "), sqlString)
require.Equal(suite.T(), sqlParams, []interface{}{
"test_code", "00000000-0000-0000-0000-000000000000",
})
}
func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithKeywordCountByAggregation() {
//setup
sqliteRepo := suite.TestRepository.(*SqliteRepository)
sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true})
//test
authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")
sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{
Select: []string{},
Where: map[string]interface{}{
"activityCode": "test_code",
},
From: "CarePlan",
Aggregations: &models.QueryResourceAggregations{CountBy: &models.QueryResourceAggregation{Field: "source_resource_type"}},
})
require.NoError(suite.T(), err)
var results []map[string]interface{}
statement := sqlQuery.Find(&results).Statement
sqlString := statement.SQL.String()
sqlParams := statement.Vars
//assert
require.NoError(suite.T(), err)
require.Equal(suite.T(),
strings.Join([]string{
"SELECT fhir.source_resource_type as label, count(*) as value",
"FROM fhir_care_plan as fhir, json_each(fhir.activityCode) as activityCodeJson",
"WHERE ((activityCodeJson.value ->> '$.code' = ?)) AND (user_id = ?)",
"GROUP BY `fhir`.`source_resource_type`",
"ORDER BY count(*) DESC",
}, " "), sqlString)
require.Equal(suite.T(), sqlParams, []interface{}{
"test_code", "00000000-0000-0000-0000-000000000000",
})
}
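//a wildcard count-by ("*") falls back to grouping by source_resource_type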
func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithWildcardCountByAggregation() {
//setup
sqliteRepo := suite.TestRepository.(*SqliteRepository)
sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true})
//test
authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")
sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{
Select: []string{},
Where: map[string]interface{}{},
From: "CarePlan",
Aggregations: &models.QueryResourceAggregations{CountBy: &models.QueryResourceAggregation{Field: "*"}},
})
require.NoError(suite.T(), err)
var results []map[string]interface{}
statement := sqlQuery.Find(&results).Statement
sqlString := statement.SQL.String()
sqlParams := statement.Vars
//assert
require.NoError(suite.T(), err)
require.Equal(suite.T(),
strings.Join([]string{
"SELECT fhir.source_resource_type as label, count(*) as value",
"FROM fhir_care_plan as fhir",
"WHERE (user_id = ?)",
"GROUP BY `fhir`.`source_resource_type`",
"ORDER BY count(*) DESC",
}, " "), sqlString)
require.Equal(suite.T(), sqlParams, []interface{}{
"00000000-0000-0000-0000-000000000000",
})
}
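//count-by on a token field with an explicit modifier (code:code) groups by the JSON-extracted code value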
func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithComplexCountByAggregation() {
//setup
sqliteRepo := suite.TestRepository.(*SqliteRepository)
sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true})
//test
authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")
sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{
Select: []string{},
Where: map[string]interface{}{
"code": "test_code",
},
From: "Observation",
Aggregations: &models.QueryResourceAggregations{CountBy: &models.QueryResourceAggregation{Field: "code:code"}},
})
require.NoError(suite.T(), err)
var results []map[string]interface{}
statement := sqlQuery.Find(&results).Statement
sqlString := statement.SQL.String()
sqlParams := statement.Vars
//assert
require.NoError(suite.T(), err)
require.Equal(suite.T(),
strings.Join([]string{
"SELECT (codeJson.value ->> '$.code') as label, count(*) as value",
"FROM fhir_observation as fhir, json_each(fhir.code) as codeJson",
"WHERE ((codeJson.value ->> '$.code' = ?)) AND (user_id = ?)",
"GROUP BY (codeJson.value ->> '$.code')",
"ORDER BY count(*) DESC",
}, " "), sqlString)
require.Equal(suite.T(), sqlParams, []interface{}{
"test_code", "00000000-0000-0000-0000-000000000000",
})
}
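//group-by combined with an order-by aggregation function sorts the groups by max(sort_date) instead of the row count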
func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithComplexGroupByWithOrderByMaxFnAggregation() {
//setup
sqliteRepo := suite.TestRepository.(*SqliteRepository)
sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true})
//test
authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")
sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{
Select: []string{},
Where: map[string]interface{}{
"code": "test_code",
},
From: "Observation",
Aggregations: &models.QueryResourceAggregations{
GroupBy: &models.QueryResourceAggregation{Field: "code:code"},
OrderBy: &models.QueryResourceAggregation{Field: "sort_date", Function: "max"},
},
})
require.NoError(suite.T(), err)
var results []map[string]interface{}
statement := sqlQuery.Find(&results).Statement
sqlString := statement.SQL.String()
sqlParams := statement.Vars
//assert
require.NoError(suite.T(), err)
require.Equal(suite.T(),
strings.Join([]string{
"SELECT (codeJson.value ->> '$.code') as label, max(fhir.sort_date) as value",
"FROM fhir_observation as fhir, json_each(fhir.code) as codeJson",
"WHERE ((codeJson.value ->> '$.code' = ?)) AND (user_id = ?)",
"GROUP BY (codeJson.value ->> '$.code')",
"ORDER BY max(fhir.sort_date) DESC",
}, " "), sqlString)
require.Equal(suite.T(), sqlParams, []interface{}{
"test_code", "00000000-0000-0000-0000-000000000000",
})
}
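//grouping by a token field without a modifier uses the combined 'system|code' value as the label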
func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithTokenGroupByNoModifier() {
//setup
sqliteRepo := suite.TestRepository.(*SqliteRepository)
sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true})
//test
authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")
sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{
Select: []string{},
Where: map[string]interface{}{},
From: "Observation",
Aggregations: &models.QueryResourceAggregations{
GroupBy: &models.QueryResourceAggregation{Field: "code"},
},
})
require.NoError(suite.T(), err)
var results []map[string]interface{}
statement := sqlQuery.Find(&results).Statement
sqlString := statement.SQL.String()
sqlParams := statement.Vars
//assert
require.NoError(suite.T(), err)
require.Equal(suite.T(),
strings.Join([]string{
"SELECT ((codeJson.value ->> '$.system') || '|' || (codeJson.value ->> '$.code')) as label, count(*) as value",
"FROM fhir_observation as fhir, json_each(fhir.code) as codeJson",
"WHERE (user_id = ?)",
"GROUP BY ((codeJson.value ->> '$.system') || '|' || (codeJson.value ->> '$.code'))",
"ORDER BY count(*) DESC",
}, " "), sqlString)
require.Equal(suite.T(), sqlParams, []interface{}{
"00000000-0000-0000-0000-000000000000",
})
}
func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithTokenGroupByNoModifierWithLimit() {
//setup
sqliteRepo := suite.TestRepository.(*SqliteRepository)
sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true})
//test
authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")
limit := 10
sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{
Select: []string{},
Where: map[string]interface{}{},
From: "Observation",
Limit: &limit,
Aggregations: &models.QueryResourceAggregations{
GroupBy: &models.QueryResourceAggregation{Field: "code"},
},
})
require.NoError(suite.T(), err)
var results []map[string]interface{}
statement := sqlQuery.Find(&results).Statement
sqlString := statement.SQL.String()
sqlParams := statement.Vars
//assert
require.NoError(suite.T(), err)
require.Equal(suite.T(),
strings.Join([]string{
"SELECT ((codeJson.value ->> '$.system') || '|' || (codeJson.value ->> '$.code')) as label, count(*) as value",
"FROM fhir_observation as fhir, json_each(fhir.code) as codeJson",
"WHERE (user_id = ?)",
"GROUP BY ((codeJson.value ->> '$.system') || '|' || (codeJson.value ->> '$.code'))",
"ORDER BY count(*) DESC",
"LIMIT 10",
}, " "), sqlString)
require.Equal(suite.T(), sqlParams, []interface{}{
"00000000-0000-0000-0000-000000000000",
})
}

View File

@ -1,304 +0,0 @@
package database
import (
"context"
"github.com/fastenhealth/fasten-onprem/backend/pkg"
mock_config "github.com/fastenhealth/fasten-onprem/backend/pkg/config/mock"
"github.com/fastenhealth/fasten-onprem/backend/pkg/event_bus"
"github.com/fastenhealth/fasten-onprem/backend/pkg/models"
"github.com/sirupsen/logrus"
"github.com/stretchr/testify/require"
"gorm.io/gorm"
"strings"
"testing"
"time"
)
// mimic tests from https://hl7.org/fhir/r4/search.html#token
func TestProcessSearchParameter(t *testing.T) {
//setup
t.Parallel()
var processSearchParameterTests = []struct {
searchParameterWithModifier string // input
searchParameterLookup map[string]string // input (allowed search parameters)
expected SearchParameter
expectedError bool // expected result
}{
{"test", map[string]string{"test": "string"}, SearchParameter{Type: "string", Name: "test", Modifier: ""}, false},
{"test:begin", map[string]string{"test": "string"}, SearchParameter{Type: "string", Name: "test", Modifier: "begin"}, false},
{"unknown:doesntmatter", map[string]string{"test": "string"}, SearchParameter{}, true}, //unknown search parameter shoudl throw error
{"unknown", map[string]string{"test": "string"}, SearchParameter{}, true}, //unknown search parameter shoudl throw error
{"test", map[string]string{"test": "faketype"}, SearchParameter{Type: "faketype", Name: "test", Modifier: ""}, false},
{"id", map[string]string{"id": "keyword"}, SearchParameter{Type: "keyword", Name: "id", Modifier: ""}, false},
{"given", map[string]string{"given": "string"}, SearchParameter{Type: "string", Name: "given", Modifier: ""}, false},
{"given:contains", map[string]string{"given": "string"}, SearchParameter{Type: "string", Name: "given", Modifier: "contains"}, false},
{"given:exact", map[string]string{"given": "string"}, SearchParameter{Type: "string", Name: "given", Modifier: "exact"}, false},
{"url:below", map[string]string{"url": "string"}, SearchParameter{Type: "string", Name: "url", Modifier: "below"}, false},
{"url:above", map[string]string{"url": "string"}, SearchParameter{Type: "string", Name: "url", Modifier: "above"}, false},
{"display:text", map[string]string{"display": "token"}, SearchParameter{}, true},
}
//test && assert
for ndx, tt := range processSearchParameterTests {
actual, actualErr := ProcessSearchParameter(tt.searchParameterWithModifier, tt.searchParameterLookup)
if tt.expectedError {
require.Error(t, actualErr, "Expected error but got none for processSearchParameterTests[%d] %s", ndx, tt.searchParameterWithModifier)
} else {
require.NoError(t, actualErr, "Expected no error but got one for processSearchParameterTests[%d] %s", ndx, tt.searchParameterWithModifier)
require.Equal(t, tt.expected, actual)
}
}
}
// mimic tests from https://hl7.org/fhir/r4/search.html#token
func TestProcessSearchParameterValue(t *testing.T) {
//setup
t.Parallel()
var processSearchParameterValueTests = []struct {
searchParameter SearchParameter // input
searchValueWithPrefix string // input (search value)
expected SearchParameterValue
expectedError bool // expected result
}{
{SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "gt0.8", SearchParameterValue{Value: 0.8, Prefix: "gt", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "100", SearchParameterValue{Value: float64(100), Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "100.00", SearchParameterValue{Value: float64(100), Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "1e2", SearchParameterValue{Value: float64(100), Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "lt100", SearchParameterValue{Value: float64(100), Prefix: "lt", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "le100", SearchParameterValue{Value: float64(100), Prefix: "le", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "gt100", SearchParameterValue{Value: float64(100), Prefix: "gt", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "ge100", SearchParameterValue{Value: float64(100), Prefix: "ge", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "ne100", SearchParameterValue{Value: float64(100), Prefix: "ne", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "unknown100", SearchParameterValue{}, true}, //unknown prefix, invalid number error
{SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "", SearchParameterValue{}, true}, //empty string, invalid number error
{SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, "eq2013-01-14", SearchParameterValue{Value: time.Date(2013, time.January, 14, 0, 0, 0, 0, time.UTC), Prefix: "eq", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, "ne2013-01-14", SearchParameterValue{Value: time.Date(2013, time.January, 14, 0, 0, 0, 0, time.UTC), Prefix: "ne", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, "lt2013-01-14T10:00:00Z", SearchParameterValue{Value: time.Date(2013, time.January, 14, 10, 0, 0, 0, time.UTC), Prefix: "lt", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, "lt2013-01-14T10:00", SearchParameterValue{}, true}, //missing seconds
{SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, "lt2013-01-14T10:00Z", SearchParameterValue{}, true}, //missing timezone
{SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, "unknown2013-01-14T10:00:00Z", SearchParameterValue{}, true}, //unkown prefix, causes invalid date error
{SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, "", SearchParameterValue{}, true}, //empty date, invalid date error
{SearchParameter{Type: "string", Name: "given", Modifier: ""}, "eve", SearchParameterValue{Value: "eve", Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "string", Name: "given", Modifier: "contains"}, "eve", SearchParameterValue{Value: "eve", Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "string", Name: "given", Modifier: "exact"}, "Eve", SearchParameterValue{Value: "Eve", Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "string", Name: "given", Modifier: ""}, "", SearchParameterValue{}, true}, //empty string, invalid string error
{SearchParameter{Type: "uri", Name: "url", Modifier: ""}, "http://acme.org/fhir/ValueSet/123", SearchParameterValue{Value: "http://acme.org/fhir/ValueSet/123", Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "uri", Name: "url", Modifier: "below"}, "http://acme.org/fhir/", SearchParameterValue{Value: "http://acme.org/fhir/", Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "uri", Name: "url", Modifier: "above"}, "http://acme.org/fhir/", SearchParameterValue{Value: "http://acme.org/fhir/", Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "uri", Name: "url", Modifier: ""}, "urn:oid:1.2.3.4.5", SearchParameterValue{Value: "urn:oid:1.2.3.4.5", Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "uri", Name: "url", Modifier: ""}, "", SearchParameterValue{}, true}, //emtpy uri, invalid uri error
{SearchParameter{Type: "token", Name: "identifier", Modifier: ""}, "http://acme.org/patient|2345", SearchParameterValue{Value: "2345", Prefix: "", SecondaryValues: map[string]interface{}{"identifierSystem": "http://acme.org/patient"}}, false},
{SearchParameter{Type: "token", Name: "gender", Modifier: ""}, "male", SearchParameterValue{Value: "male", Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "token", Name: "gender", Modifier: "not"}, "male", SearchParameterValue{Value: "male", Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "token", Name: "section", Modifier: "not"}, "48765-2", SearchParameterValue{Value: "48765-2", Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "token", Name: "active", Modifier: ""}, "true", SearchParameterValue{Value: "true", Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "token", Name: "code", Modifier: ""}, "http://acme.org/conditions/codes|ha125", SearchParameterValue{Value: "ha125", Prefix: "", SecondaryValues: map[string]interface{}{"codeSystem": "http://acme.org/conditions/codes"}}, false},
{SearchParameter{Type: "token", Name: "code", Modifier: ""}, "ha125", SearchParameterValue{Value: "ha125", Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "token", Name: "identifier", Modifier: "otype"}, "http://terminology.hl7.org/CodeSystem/v2-0203|MR|446053", SearchParameterValue{Value: "MR|446053", Prefix: "", SecondaryValues: map[string]interface{}{"identifierSystem": "http://terminology.hl7.org/CodeSystem/v2-0203"}}, false},
{SearchParameter{Type: "token", Name: "code", Modifier: ""}, "|", SearchParameterValue{}, true}, //empty value should throw an error
{SearchParameter{Type: "token", Name: "code", Modifier: ""}, "", SearchParameterValue{}, true}, //empty value should throw an error
{SearchParameter{Type: "token", Name: "code", Modifier: ""}, "http://acme.org/conditions/codes|", SearchParameterValue{Value: "", Prefix: "", SecondaryValues: map[string]interface{}{"codeSystem": "http://acme.org/conditions/codes"}}, false},
{SearchParameter{Type: "token", Name: "code", Modifier: ""}, "|807-1", SearchParameterValue{Value: "807-1", Prefix: "", SecondaryValues: map[string]interface{}{"codeSystem": ""}}, false},
{SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "5.4|http://unitsofmeasure.org|mg", SearchParameterValue{Value: float64(5.4), Prefix: "", SecondaryValues: map[string]interface{}{"valueQuantitySystem": "http://unitsofmeasure.org", "valueQuantityCode": "mg"}}, false},
{SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "5.40e-3|http://unitsofmeasure.org|g", SearchParameterValue{Value: float64(0.0054), Prefix: "", SecondaryValues: map[string]interface{}{"valueQuantitySystem": "http://unitsofmeasure.org", "valueQuantityCode": "g"}}, false},
{SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "5.4||mg", SearchParameterValue{Value: float64(5.4), Prefix: "", SecondaryValues: map[string]interface{}{"valueQuantityCode": "mg"}}, false},
{SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "5.4", SearchParameterValue{Value: float64(5.4), Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "le5.4|http://unitsofmeasure.org|mg", SearchParameterValue{Value: float64(5.4), Prefix: "le", SecondaryValues: map[string]interface{}{"valueQuantitySystem": "http://unitsofmeasure.org", "valueQuantityCode": "mg"}}, false},
{SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "ap5.4|http://unitsofmeasure.org|mg", SearchParameterValue{Value: float64(5.4), Prefix: "ap", SecondaryValues: map[string]interface{}{"valueQuantitySystem": "http://unitsofmeasure.org", "valueQuantityCode": "mg"}}, false},
{SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "unknown5.4", SearchParameterValue{}, true}, //unknown prefix, causes invalid number
{SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "ap5.4|http://unitsofmeasure.org|mg|additional", SearchParameterValue{Value: float64(5.4), Prefix: "ap", SecondaryValues: map[string]interface{}{"valueQuantitySystem": "http://unitsofmeasure.org", "valueQuantityCode": "mg|additional"}}, false},
{SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "5.4||", SearchParameterValue{Value: float64(5.4), Prefix: "", SecondaryValues: map[string]interface{}{}}, false},
{SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "", SearchParameterValue{}, true},
{SearchParameter{Type: "keyword", Name: "id", Modifier: ""}, "1234", SearchParameterValue{Value: "1234", SecondaryValues: map[string]interface{}{}}, false},
}
//test && assert
for ndx, tt := range processSearchParameterValueTests {
actual, actualErr := ProcessSearchParameterValue(tt.searchParameter, tt.searchValueWithPrefix)
if tt.expectedError {
require.Error(t, actualErr, "Expected error but got none for processSearchParameterValueTests[%d] %s=%s", ndx, tt.searchParameter.Name, tt.searchValueWithPrefix)
} else {
require.NoError(t, actualErr, "Expected no error but got one for processSearchParameterValueTests[%d] %s", ndx, tt.searchParameter.Name, tt.searchValueWithPrefix)
require.Equal(t, tt.expected, actual)
}
}
}
func TestSearchCodeToWhereClause(t *testing.T) {
//setup
var searchCodeToWhereClauseTests = []struct {
searchParameter SearchParameter
searchValue SearchParameterValue
searchLevelSuffix string
expectedClause string
expectedNamedParams map[string]interface{}
expectedError bool
}{
{SearchParameter{Type: "number", Name: "probability", Modifier: ""}, SearchParameterValue{Value: float64(100), Prefix: "gt", SecondaryValues: map[string]interface{}{}}, "0_0", "(probability > @probability_0_0)", map[string]interface{}{"probability_0_0": float64(100)}, false},
{SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, SearchParameterValue{Value: time.Date(2013, time.January, 14, 10, 0, 0, 0, time.UTC), Prefix: "lt", SecondaryValues: map[string]interface{}{}}, "1_1", "(issueDate < @issueDate_1_1)", map[string]interface{}{"issueDate_1_1": time.Date(2013, time.January, 14, 10, 0, 0, 0, time.UTC)}, false},
{SearchParameter{Type: "string", Name: "given", Modifier: ""}, SearchParameterValue{Value: "eve", Prefix: "", SecondaryValues: map[string]interface{}{}}, "0_0", "(givenJson.value LIKE @given_0_0)", map[string]interface{}{"given_0_0": "eve%"}, false},
{SearchParameter{Type: "string", Name: "given", Modifier: "contains"}, SearchParameterValue{Value: "eve", Prefix: "", SecondaryValues: map[string]interface{}{}}, "0_0", "(givenJson.value LIKE @given_0_0)", map[string]interface{}{"given_0_0": "%eve%"}, false},
{SearchParameter{Type: "string", Name: "given", Modifier: "exact"}, SearchParameterValue{Value: "eve", Prefix: "", SecondaryValues: map[string]interface{}{}}, "0_0", "(givenJson.value = @given_0_0)", map[string]interface{}{"given_0_0": "eve"}, false},
{SearchParameter{Type: "uri", Name: "url", Modifier: "below"}, SearchParameterValue{Value: "http://acme.org/fhir/", Prefix: "", SecondaryValues: map[string]interface{}{}}, "0_0", "(url LIKE @url_0_0)", map[string]interface{}{"url_0_0": "http://acme.org/fhir/%"}, false},
{SearchParameter{Type: "uri", Name: "url", Modifier: "above"}, SearchParameterValue{Value: "http://acme.org/fhir/", Prefix: "", SecondaryValues: map[string]interface{}{}}, "0_0", "", map[string]interface{}{}, true}, //above modifier not supported
{SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, SearchParameterValue{Value: float64(5.4), Prefix: "", SecondaryValues: map[string]interface{}{"valueQuantityCode": "mg"}}, "0_0", "(valueQuantityJson.value ->> '$.value' = @valueQuantity_0_0 AND valueQuantityJson.value ->> '$.code' = @valueQuantityCode_0_0)", map[string]interface{}{"valueQuantity_0_0": float64(5.4), "valueQuantityCode_0_0": "mg"}, false},
{SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, SearchParameterValue{Value: float64(5.4), Prefix: "", SecondaryValues: map[string]interface{}{}}, "0_0", "(valueQuantityJson.value ->> '$.value' = @valueQuantity_0_0)", map[string]interface{}{"valueQuantity_0_0": float64(5.4)}, false},
{SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, SearchParameterValue{Value: float64(5.4), Prefix: "le", SecondaryValues: map[string]interface{}{"valueQuantitySystem": "http://unitsofmeasure.org", "valueQuantityCode": "mg"}}, "0_0", "(valueQuantityJson.value ->> '$.value' <= @valueQuantity_0_0 AND valueQuantityJson.value ->> '$.code' = @valueQuantityCode_0_0 AND valueQuantityJson.value ->> '$.system' = @valueQuantitySystem_0_0)", map[string]interface{}{"valueQuantity_0_0": float64(5.4), "valueQuantitySystem_0_0": "http://unitsofmeasure.org", "valueQuantityCode_0_0": "mg"}, false},
{SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, SearchParameterValue{Value: float64(5.4), Prefix: "ap", SecondaryValues: map[string]interface{}{"valueQuantitySystem": "http://unitsofmeasure.org", "valueQuantityCode": "mg"}}, "0_0", "", map[string]interface{}{}, true}, //ap modifier not supported
{SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, SearchParameterValue{Value: float64(5.4), Prefix: "ne", SecondaryValues: map[string]interface{}{"valueQuantitySystem": "http://unitsofmeasure.org", "valueQuantityCode": "mg"}}, "0_0", "(valueQuantityJson.value ->> '$.value' <> @valueQuantity_0_0 AND valueQuantityJson.value ->> '$.code' = @valueQuantityCode_0_0 AND valueQuantityJson.value ->> '$.system' = @valueQuantitySystem_0_0)", map[string]interface{}{"valueQuantity_0_0": float64(5.4), "valueQuantitySystem_0_0": "http://unitsofmeasure.org", "valueQuantityCode_0_0": "mg"}, false},
{SearchParameter{Type: "token", Name: "code", Modifier: ""}, SearchParameterValue{Value: "ha125", Prefix: "", SecondaryValues: map[string]interface{}{"codeSystem": "http://acme.org/conditions/codes"}}, "0_0", "(codeJson.value ->> '$.code' = @code_0_0 AND codeJson.value ->> '$.system' = @codeSystem_0_0)", map[string]interface{}{"code_0_0": "ha125", "codeSystem_0_0": "http://acme.org/conditions/codes"}, false},
{SearchParameter{Type: "token", Name: "code", Modifier: ""}, SearchParameterValue{Value: "ha125", Prefix: "", SecondaryValues: map[string]interface{}{}}, "0_0", "(codeJson.value ->> '$.code' = @code_0_0)", map[string]interface{}{"code_0_0": "ha125"}, false},
{SearchParameter{Type: "token", Name: "identifier", Modifier: "otype"}, SearchParameterValue{Value: "MR|446053", Prefix: "", SecondaryValues: map[string]interface{}{"identifierSystem": "http://terminology.hl7.org/CodeSystem/v2-0203"}}, "0_0", "(identifierJson.value ->> '$.code' = @identifier_0_0 AND identifierJson.value ->> '$.system' = @identifierSystem_0_0)", map[string]interface{}{"identifier_0_0": "MR|446053", "identifierSystem_0_0": "http://terminology.hl7.org/CodeSystem/v2-0203"}, false},
{SearchParameter{Type: "keyword", Name: "id", Modifier: ""}, SearchParameterValue{Value: "1234", Prefix: "", SecondaryValues: map[string]interface{}{}}, "0_0", "(id = @id_0_0)", map[string]interface{}{"id_0_0": "1234"}, false},
}
//test && assert
for ndx, tt := range searchCodeToWhereClauseTests {
actualClause, actualNamedParams, actualErr := SearchCodeToWhereClause(tt.searchParameter, tt.searchValue, tt.searchLevelSuffix)
if tt.expectedError {
require.Error(t, actualErr, "Expected error but got none for searchCodeToWhereClauseTests[%d] %s=%s", ndx, tt.searchParameter.Name, tt.searchValue.Value)
} else {
require.NoError(t, actualErr, "Expected no error but got one for searchCodeToWhereClauseTests[%d] %s=%s", ndx, tt.searchParameter.Name, tt.searchValue.Value)
require.Equal(t, tt.expectedClause, actualClause)
require.Equal(t, tt.expectedNamedParams, actualNamedParams)
}
}
}
// TODO
func TestSearchCodeToFromClause(t *testing.T) {
//setup
var searchCodeToFromClauseTests = []struct {
searchParameter SearchParameter
expectedClause string
expectedError bool
}{
{SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "", false},
{SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, "", false},
{SearchParameter{Type: "keyword", Name: "id", Modifier: ""}, "", false},
{SearchParameter{Type: "token", Name: "hello", Modifier: ""}, "json_each(fhir.hello) as helloJson", false},
}
//test && assert
for ndx, tt := range searchCodeToFromClauseTests {
actualClause, actualErr := SearchCodeToFromClause(tt.searchParameter)
if tt.expectedError {
require.Error(t, actualErr, "Expected error but got none for searchCodeToFromClauseTests[%d] %s", ndx, tt.searchParameter.Name)
} else {
require.NoError(t, actualErr, "Expected no error but got one for searchCodeToFromClauseTests[%d] %s", ndx, tt.searchParameter.Name)
require.Equal(t, tt.expectedClause, actualClause)
}
}
}
//Aggregation tests
// mimic tests from https://hl7.org/fhir/r4/search.html#token
func TestProcessAggregationParameter(t *testing.T) {
//setup
t.Parallel()
var processSearchParameterTests = []struct {
aggregationFieldWithFn models.QueryResourceAggregation // input
searchParameterLookup map[string]string // input (allowed search parameters)
expected AggregationParameter
expectedError bool // expected result
}{
//primitive types
{models.QueryResourceAggregation{Field: "test"}, map[string]string{"test": "keyword"}, AggregationParameter{SearchParameter: SearchParameter{Type: "keyword", Name: "test", Modifier: ""}}, false},
{models.QueryResourceAggregation{Field: "test"}, map[string]string{"test": "number"}, AggregationParameter{SearchParameter: SearchParameter{Type: "number", Name: "test", Modifier: ""}}, false},
{models.QueryResourceAggregation{Field: "test"}, map[string]string{"test": "uri"}, AggregationParameter{SearchParameter: SearchParameter{Type: "uri", Name: "test", Modifier: ""}}, false},
{models.QueryResourceAggregation{Field: "test"}, map[string]string{"test": "date"}, AggregationParameter{SearchParameter: SearchParameter{Type: "date", Name: "test", Modifier: ""}}, false},
{models.QueryResourceAggregation{Field: "test:hello"}, map[string]string{"test": "keyword"}, AggregationParameter{SearchParameter: SearchParameter{Type: "date", Name: "test", Modifier: ""}}, true}, //cannot have a modifier
{models.QueryResourceAggregation{Field: "test:hello"}, map[string]string{"test": "number"}, AggregationParameter{SearchParameter: SearchParameter{Type: "date", Name: "test", Modifier: ""}}, true}, //cannot have a modifier
{models.QueryResourceAggregation{Field: "test:hello"}, map[string]string{"test": "uri"}, AggregationParameter{SearchParameter: SearchParameter{Type: "date", Name: "test", Modifier: ""}}, true}, //cannot have a modifier
{models.QueryResourceAggregation{Field: "test:hello"}, map[string]string{"test": "date"}, AggregationParameter{SearchParameter: SearchParameter{Type: "date", Name: "test", Modifier: ""}}, true}, //cannot have a modifier
//complex types
{models.QueryResourceAggregation{Field: "test"}, map[string]string{"test": "reference"}, AggregationParameter{SearchParameter: SearchParameter{Type: "reference", Name: "test", Modifier: ""}}, true}, //complex types should throw an error when missing modifier
{models.QueryResourceAggregation{Field: "test"}, map[string]string{"test": "string"}, AggregationParameter{SearchParameter: SearchParameter{Type: "string", Name: "test", Modifier: ""}}, true}, //complex types should throw an error when missing modifier
{models.QueryResourceAggregation{Field: "test"}, map[string]string{"test": "quantity"}, AggregationParameter{SearchParameter: SearchParameter{Type: "quantity", Name: "test", Modifier: ""}}, true}, //complex types should throw an error when missing modifier
{models.QueryResourceAggregation{Field: "test:hello"}, map[string]string{"test": "reference"}, AggregationParameter{SearchParameter: SearchParameter{Type: "reference", Name: "test", Modifier: "hello"}}, false},
{models.QueryResourceAggregation{Field: "test:hello"}, map[string]string{"test": "string"}, AggregationParameter{SearchParameter: SearchParameter{Type: "string", Name: "test", Modifier: "hello"}}, false},
{models.QueryResourceAggregation{Field: "test:hello"}, map[string]string{"test": "quantity"}, AggregationParameter{SearchParameter: SearchParameter{Type: "quantity", Name: "test", Modifier: "hello"}}, false},
//token type
{models.QueryResourceAggregation{Field: "code"}, map[string]string{"code": "token"}, AggregationParameter{SearchParameter: SearchParameter{Type: "token", Name: "code", Modifier: ""}}, false},
{models.QueryResourceAggregation{Field: "code:code"}, map[string]string{"code": "token"}, AggregationParameter{SearchParameter: SearchParameter{Type: "token", Name: "code", Modifier: "code"}}, false},
}
//test && assert
for ndx, tt := range processSearchParameterTests {
actual, actualErr := ProcessAggregationParameter(tt.aggregationFieldWithFn, tt.searchParameterLookup)
if tt.expectedError {
require.Error(t, actualErr, "Expected error but got none for processAggregationParameterTests[%d] %s", ndx, tt.aggregationFieldWithFn)
} else {
require.NoError(t, actualErr, "Expected no error but got one for processAggregationParameterTests[%d] %s", ndx, tt.aggregationFieldWithFn)
require.Equal(t, tt.expected, actual)
}
}
}
func (suite *RepositoryTestSuite) TestQueryResources_SQL() {
//setup
fakeConfig := mock_config.NewMockInterface(suite.MockCtrl)
fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes()
fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes()
fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes()
dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer())
require.NoError(suite.T(), err)
userModel := &models.User{
Username: "test_username",
Password: "testpassword",
Email: "test@test.com",
}
err = dbRepo.CreateUser(context.Background(), userModel)
require.NoError(suite.T(), err)
sqliteRepo := dbRepo.(*SqliteRepository)
sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true})
//test
authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")
sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{
Select: []string{},
Where: map[string]interface{}{
"code": "test_code",
},
From: "Observation",
})
require.NoError(suite.T(), err)
var results []map[string]interface{}
statement := sqlQuery.Find(&results).Statement
sqlString := statement.SQL.String()
sqlParams := statement.Vars
//assert
require.NoError(suite.T(), err)
require.Equal(suite.T(), sqlString,
strings.Join([]string{
"SELECT fhir.*",
"FROM fhir_observation as fhir, json_each(fhir.code) as codeJson",
"WHERE ((codeJson.value ->> '$.code' = ?)) AND (user_id = ?) GROUP BY `fhir`.`id`",
"ORDER BY fhir.sort_date DESC"}, " "))
require.Equal(suite.T(), sqlParams, []interface{}{
"test_code", "00000000-0000-0000-0000-000000000000",
})
}

View File

@ -1,97 +0,0 @@
package database
import (
"context"
"fmt"
"github.com/fastenhealth/fasten-onprem/backend/pkg/models"
"github.com/google/uuid"
)
// LoadUserSettings will retrieve the current user's settings from the database and return them as a UserSettings struct
func (sr *SqliteRepository) LoadUserSettings(ctx context.Context) (*models.UserSettings, error) {
currentUser, currentUserErr := sr.GetCurrentUser(ctx)
if currentUserErr != nil {
return nil, currentUserErr
}
settingsEntries := []models.UserSettingEntry{}
if err := sr.GormClient.
WithContext(ctx).
Where(models.UserSettingEntry{
UserID: currentUser.ID,
}).
Find(&settingsEntries).Error; err != nil {
return nil, fmt.Errorf("Could not get settings from DB: %v", err)
}
settings := models.UserSettings{}
for _, settingsEntry := range settingsEntries {
err := settings.FromUserSettingsEntry(&settingsEntry)
if err != nil {
return nil, fmt.Errorf("Could not get settings from DB: %v", err)
}
}
return &settings, nil
}
// testing
// curl -d '{"metrics": { "notify_level": 5, "status_filter_attributes": 5, "status_threshold": 5 }}' -H "Content-Type: application/json" -X POST http://localhost:9090/api/settings
// SaveUserSettings will merge the new settings with the existing entries and save them to the database.
func (sr *SqliteRepository) SaveUserSettings(ctx context.Context, newSettings *models.UserSettings) error {
currentUser, currentUserErr := sr.GetCurrentUser(ctx)
if currentUserErr != nil {
return currentUserErr
}
//retrieve current settings from the database
currentSettingsEntries := []models.UserSettingEntry{}
if err := sr.GormClient.
WithContext(ctx).
Where(models.UserSettingEntry{
UserID: currentUser.ID,
}).
Find(&currentSettingsEntries).Error; err != nil {
return fmt.Errorf("Could not get settings from DB: %v", err)
}
//update settingsEntries
newSettingsEntries, err := newSettings.ToUserSettingsEntry(currentSettingsEntries)
if err != nil {
return fmt.Errorf("merge new settings with DB: %v", err)
}
for ndx, settingsEntry := range newSettingsEntries {
// store in database.
//TODO: this should be `sr.gormClient.Updates(&settingsEntries).Error`
err := sr.GormClient.
WithContext(ctx).
Model(&models.UserSettingEntry{}).
Where([]uuid.UUID{settingsEntry.ID}).
Select("setting_value_numeric", "setting_value_string", "setting_value_bool", "setting_value_array").
Updates(newSettingsEntries[ndx]).Error
if err != nil {
return err
}
}
return nil
}
func (sr *SqliteRepository) PopulateDefaultUserSettings(ctx context.Context, userId uuid.UUID) error {
//create the default settings entries for this user and store them in the database
settingsEntries := []models.UserSettingEntry{}
settingsEntries = append(settingsEntries, models.UserSettingEntry{
UserID: userId,
SettingKeyName: "dashboard_locations",
SettingKeyDescription: "remote dashboard locations (github gists)",
SettingDataType: "array",
SettingValueArray: []string{},
})
return sr.GormClient.WithContext(ctx).Create(settingsEntries).Error
}

View File

@ -1,104 +0,0 @@
package database
import (
"context"
"fmt"
"github.com/fastenhealth/fasten-onprem/backend/pkg"
"github.com/fastenhealth/fasten-onprem/backend/pkg/config"
"github.com/fastenhealth/fasten-onprem/backend/pkg/event_bus"
"github.com/fastenhealth/fasten-onprem/backend/pkg/models"
"github.com/sirupsen/logrus"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"io/ioutil"
"log"
"os"
"testing"
)
// Define the suite, and absorb the built-in basic suite
// functionality from testify - including a T() method which
// returns the current testing context
type RepositorySettingsTestSuite struct {
suite.Suite
TestDatabase *os.File
TestConfig config.Interface
TestRepository DatabaseRepository
TestUser *models.User
}
// BeforeTest has a function to be executed right before the test starts and receives the suite and test names as input
func (suite *RepositorySettingsTestSuite) BeforeTest(suiteName, testName string) {
dbFile, err := ioutil.TempFile("", fmt.Sprintf("%s.*.db", testName))
if err != nil {
log.Fatal(err)
}
suite.TestDatabase = dbFile
testConfig, err := config.Create()
require.NoError(suite.T(), err)
testConfig.SetDefault("database.location", suite.TestDatabase.Name())
testConfig.SetDefault("log.level", "INFO")
suite.TestConfig = testConfig
dbRepo, err := NewRepository(testConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer())
require.NoError(suite.T(), err)
suite.TestRepository = dbRepo
userModel := &models.User{
Username: "test_username",
Password: "testpassword",
Email: "test@test.com",
}
err = suite.TestRepository.CreateUser(context.Background(), userModel)
suite.TestUser = userModel
require.NoError(suite.T(), err)
}
// AfterTest has a function to be executed right after the test finishes and receives the suite and test names as input
func (suite *RepositorySettingsTestSuite) AfterTest(suiteName, testName string) {
os.Remove(suite.TestDatabase.Name())
}
// In order for 'go test' to run this suite, we need to create
// a normal test function and pass our suite to suite.Run
func TestRepositorySettingsTestSuite(t *testing.T) {
suite.Run(t, new(RepositorySettingsTestSuite))
}
func (suite *RepositorySettingsTestSuite) TestLoadUserSettings() {
//setup
authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")
//test
userSettings, err := suite.TestRepository.LoadUserSettings(authContext)
require.NoError(suite.T(), err)
//assert
require.Equal(suite.T(), userSettings, &models.UserSettings{
DashboardLocations: []string{},
})
}
func (suite *RepositorySettingsTestSuite) TestSaveUserSettings() {
//setup
authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")
//test
err := suite.TestRepository.SaveUserSettings(authContext, &models.UserSettings{
DashboardLocations: []string{"https://gist.github.com/AnalogJ/a56ded05cc6766b377268f14719cb84d"},
})
require.NoError(suite.T(), err)
userSettings, err := suite.TestRepository.LoadUserSettings(authContext)
require.NoError(suite.T(), err)
//assert
require.Equal(suite.T(), userSettings, &models.UserSettings{
DashboardLocations: []string{
"https://gist.github.com/AnalogJ/a56ded05cc6766b377268f14719cb84d",
},
})
}

File diff suppressed because it is too large