diff --git a/backend/pkg/database/factory.go b/backend/pkg/database/factory.go index e0c0a483..7575d44b 100644 --- a/backend/pkg/database/factory.go +++ b/backend/pkg/database/factory.go @@ -12,6 +12,8 @@ func NewRepository(appConfig config.Interface, globalLogger logrus.FieldLogger, switch pkg.DatabaseRepositoryType(appConfig.GetString("database.type")) { case pkg.DatabaseRepositoryTypeSqlite: return newSqliteRepository(appConfig, globalLogger, eventBus) + case pkg.DatabaseRepositoryTypePostgres: + return newPostgresRepository(appConfig, globalLogger, eventBus) default: return nil, errors.DatabaseTypeNotSupportedError(appConfig.GetString("database.type")) } diff --git a/backend/pkg/database/gorm_common.go b/backend/pkg/database/gorm_common.go new file mode 100644 index 00000000..b892993f --- /dev/null +++ b/backend/pkg/database/gorm_common.go @@ -0,0 +1,1117 @@ +package database + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "net/url" + "strings" + "time" + + "github.com/fastenhealth/fasten-onprem/backend/pkg" + "github.com/fastenhealth/fasten-onprem/backend/pkg/config" + "github.com/fastenhealth/fasten-onprem/backend/pkg/event_bus" + "github.com/fastenhealth/fasten-onprem/backend/pkg/models" + databaseModel "github.com/fastenhealth/fasten-onprem/backend/pkg/models/database" + "github.com/fastenhealth/fasten-onprem/backend/pkg/utils" + sourceModel "github.com/fastenhealth/fasten-sources/clients/models" + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/sirupsen/logrus" + "gorm.io/datatypes" + "gorm.io/gorm" +) + +type GormRepository struct { + AppConfig config.Interface + Logger logrus.FieldLogger + + GormClient *gorm.DB + + EventBus event_bus.Interface +} + +func (gr *GormRepository) Migrate() error { + err := gr.GormClient.AutoMigrate( + &models.User{}, + &models.SourceCredential{}, + &models.BackgroundJob{}, + &models.Glossary{}, + &models.UserSettingEntry{}, + ) + if err != nil { + return fmt.Errorf("Failed to automigrate! - %v", err) + } + return nil +} + +func (gr *GormRepository) Close() error { + return nil +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// User +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +func (gr *GormRepository) CreateUser(ctx context.Context, user *models.User) error { + if err := user.HashPassword(user.Password); err != nil { + return err + } + record := gr.GormClient.Create(user) + if record.Error != nil { + return record.Error + } + + //create user settings + err := gr.PopulateDefaultUserSettings(ctx, user.ID) + if err != nil { + return err + } + return nil +} +func (gr *GormRepository) GetUserByUsername(ctx context.Context, username string) (*models.User, error) { + var foundUser models.User + result := gr.GormClient.WithContext(ctx).Where(models.User{Username: username}).First(&foundUser) + return &foundUser, result.Error +} + +// TODO: check for error, right now we return a nil which may cause a panic. +// TODO: can we cache the current user? 
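+// GetCurrentUser resolves the authenticated username from the request context (either a plain context value or a gin.Context key) and loads the matching User row.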
//SECURITY: +func (gr *GormRepository) GetCurrentUser(ctx context.Context) (*models.User, error) { + username := ctx.Value(pkg.ContextKeyTypeAuthUsername) + if username == nil { + ginCtx, ginCtxOk := ctx.(*gin.Context) + if !ginCtxOk { + return nil, fmt.Errorf("could not convert context to gin context") + } + var exists bool + username, exists = ginCtx.Get(pkg.ContextKeyTypeAuthUsername) + if !exists { + return nil, fmt.Errorf("could not extract username from context") + } + } + + var currentUser models.User + usernameStr, usernameStrOk := username.(string) + if !usernameStrOk { + return nil, fmt.Errorf("could not convert username to string: %v", username) + } + + result := gr.GormClient. + WithContext(ctx). + First(¤tUser, map[string]interface{}{"username": usernameStr}) + + if result.Error != nil { + return nil, fmt.Errorf("could not retrieve current user: %v", result.Error) + } + + return ¤tUser, nil +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// Glossary +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +func (gr *GormRepository) CreateGlossaryEntry(ctx context.Context, glossaryEntry *models.Glossary) error { + record := gr.GormClient.WithContext(ctx).Create(glossaryEntry) + if record.Error != nil { + return record.Error + } + return nil +} + +func (gr *GormRepository) GetGlossaryEntry(ctx context.Context, code string, codeSystem string) (*models.Glossary, error) { + var foundGlossaryEntry models.Glossary + result := gr.GormClient.WithContext(ctx). + Where(models.Glossary{Code: code, CodeSystem: codeSystem}). + First(&foundGlossaryEntry) + return &foundGlossaryEntry, result.Error +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// Summary +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +func (gr *GormRepository) GetSummary(ctx context.Context) (*models.Summary, error) { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return nil, currentUserErr + } + + // we want a count of all resources for this user by type + var resourceCountResults []map[string]interface{} + + resourceTypes := databaseModel.GetAllowedResourceTypes() + for _, resourceType := range resourceTypes { + tableName, err := databaseModel.GetTableNameByResourceType(resourceType) + if err != nil { + return nil, err + } + var count int64 + result := gr.GormClient.WithContext(ctx). + Table(tableName). + Where(models.OriginBase{ + UserID: currentUser.ID, + }). 
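+ // count resources of this type owned by the current user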
+ Count(&count) + if result.Error != nil { + return nil, result.Error + } + if count == 0 { + continue //don't add resource counts if the count is 0 + } + resourceCountResults = append(resourceCountResults, map[string]interface{}{ + "resource_type": resourceType, + "count": count, + }) + } + + // we want a list of all sources (when they were last updated) + sources, err := gr.GetSources(ctx) + if err != nil { + return nil, err + } + + // we want the main Patient for each source + patients, err := gr.GetPatientForSources(ctx) + if err != nil { + return nil, err + } + + if resourceCountResults == nil { + resourceCountResults = []map[string]interface{}{} + } + summary := &models.Summary{ + Sources: sources, + ResourceTypeCounts: resourceCountResults, + Patients: patients, + } + + return summary, nil +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// Resource +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +// This function will create a new resource if it does not exist, or update an existing resource if it does exist. +// It will also create associations between fhir resources +// This function is called directly by fasten-sources +func (gr *GormRepository) UpsertRawResource(ctx context.Context, sourceCredential sourceModel.SourceCredential, rawResource sourceModel.RawResourceFhir) (bool, error) { + + source := sourceCredential.(*models.SourceCredential) + + //convert from a raw resource (from fasten-sources) to a ResourceFhir (which matches the database models) + wrappedResourceModel := &models.ResourceBase{ + OriginBase: models.OriginBase{ + ModelBase: models.ModelBase{}, + UserID: source.UserID, + SourceID: source.ID, + SourceResourceID: rawResource.SourceResourceID, + SourceResourceType: rawResource.SourceResourceType, + }, + SortTitle: rawResource.SortTitle, + SortDate: rawResource.SortDate, + ResourceRaw: datatypes.JSON(rawResource.ResourceRaw), + RelatedResource: nil, + } + if len(rawResource.SourceUri) > 0 { + wrappedResourceModel.SourceUri = &rawResource.SourceUri + } + + //create associations + //note: we create the association in the related_resources table **before** the model actually exists. + //note: these associations are not reciprocal, (i.e. if Procedure references Location, Location may not reference Procedure) + if rawResource.ReferencedResources != nil && len(rawResource.ReferencedResources) > 0 { + for _, referencedResource := range rawResource.ReferencedResources { + parts := strings.Split(referencedResource, "/") + if len(parts) != 2 { + continue + } + + relatedResource := &models.ResourceBase{ + OriginBase: models.OriginBase{ + SourceID: source.ID, + SourceResourceType: parts[0], + SourceResourceID: parts[1], + }, + RelatedResource: nil, + } + err := gr.AddResourceAssociation( + ctx, + source, + wrappedResourceModel.SourceResourceType, + wrappedResourceModel.SourceResourceID, + source, + relatedResource.SourceResourceType, + relatedResource.SourceResourceID, + ) + if err != nil { + return false, err + } + } + } + + return gr.UpsertResource(ctx, wrappedResourceModel) + +} + +// UpsertResource +// this method will upsert a resource, however it will not create associations. 
+// UPSERT operation +// - call FindOrCreate +// - check if the resource exists +// - if it does not exist, insert it +// +// - if no error during FindOrCreate && no rows affected (nothing was created) +// - update the resource using Updates operation +func (gr *GormRepository) UpsertResource(ctx context.Context, wrappedResourceModel *models.ResourceBase) (bool, error) { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return false, currentUserErr + } + + wrappedResourceModel.UserID = currentUser.ID + cachedResourceRaw := wrappedResourceModel.ResourceRaw + + gr.Logger.Infof("insert/update FHIRResource (%v) %v", wrappedResourceModel.SourceResourceType, wrappedResourceModel.SourceResourceID) + wrappedFhirResourceModel, err := databaseModel.NewFhirResourceModelByType(wrappedResourceModel.SourceResourceType) + if err != nil { + return false, err + } + + wrappedFhirResourceModel.SetOriginBase(wrappedResourceModel.OriginBase) + wrappedFhirResourceModel.SetSortTitle(wrappedResourceModel.SortTitle) + wrappedFhirResourceModel.SetSortDate(wrappedResourceModel.SortDate) + wrappedFhirResourceModel.SetSourceUri(wrappedResourceModel.SourceUri) + + //TODO: this takes too long, we need to find a way to do this processing faster or in the background async. + err = wrappedFhirResourceModel.PopulateAndExtractSearchParameters(json.RawMessage(wrappedResourceModel.ResourceRaw)) + if err != nil { + gr.Logger.Warnf("ignoring: an error occurred while extracting SearchParameters using FHIRPath (%s/%s): %v", wrappedResourceModel.SourceResourceType, wrappedResourceModel.SourceResourceID, err) + //wrappedFhirResourceModel.SetResourceRaw(wrappedResourceModel.ResourceRaw) + } + + eventSourceSync := models.NewEventSourceSync( + currentUser.ID.String(), + wrappedFhirResourceModel.GetSourceID().String(), + wrappedFhirResourceModel.GetSourceResourceType(), + wrappedFhirResourceModel.GetSourceResourceID(), + ) + + err = gr.EventBus.PublishMessage(eventSourceSync) + if err != nil { + gr.Logger.Warnf("ignoring: an error occurred while publishing event to eventBus (%s/%s): %v", wrappedResourceModel.SourceResourceType, wrappedResourceModel.SourceResourceID, err) + } + + createResult := gr.GormClient.WithContext(ctx).Where(models.OriginBase{ + SourceID: wrappedFhirResourceModel.GetSourceID(), + SourceResourceID: wrappedFhirResourceModel.GetSourceResourceID(), + SourceResourceType: wrappedFhirResourceModel.GetSourceResourceType(), //TODO: and UpdatedAt > old UpdatedAt + }).Omit("RelatedResource.*").FirstOrCreate(wrappedFhirResourceModel) + + if createResult.Error != nil { + return false, createResult.Error + } else if createResult.RowsAffected == 0 { + //at this point, wrappedResourceModel contains the data found in the database. + // check if the database resource matches the new resource. 
+ if wrappedResourceModel.ResourceRaw.String() != string(cachedResourceRaw) { + updateResult := createResult.Omit("RelatedResource.*").Updates(wrappedResourceModel) + return updateResult.RowsAffected > 0, updateResult.Error + } else { + return false, nil + } + + } else { + //resource was created + return createResult.RowsAffected > 0, createResult.Error + } +} + +func (gr *GormRepository) ListResources(ctx context.Context, queryOptions models.ListResourceQueryOptions) ([]models.ResourceBase, error) { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return nil, currentUserErr + } + + queryParam := models.OriginBase{ + UserID: currentUser.ID, + } + + if len(queryOptions.SourceResourceType) > 0 { + queryParam.SourceResourceType = queryOptions.SourceResourceType + } + + if len(queryOptions.SourceID) > 0 { + sourceUUID, err := uuid.Parse(queryOptions.SourceID) + if err != nil { + return nil, err + } + + queryParam.SourceID = sourceUUID + } + if len(queryOptions.SourceResourceID) > 0 { + queryParam.SourceResourceID = queryOptions.SourceResourceID + } + + manifestJson, _ := json.MarshalIndent(queryParam, "", " ") + gr.Logger.Debugf("THE QUERY OBJECT===========> %v", string(manifestJson)) + + var wrappedResourceModels []models.ResourceBase + queryBuilder := gr.GormClient.WithContext(ctx) + if len(queryOptions.SourceResourceType) > 0 { + tableName, err := databaseModel.GetTableNameByResourceType(queryOptions.SourceResourceType) + if err != nil { + return nil, err + } + queryBuilder = queryBuilder. + Where(queryParam). + Table(tableName) + + if queryOptions.Limit > 0 { + queryBuilder = queryBuilder.Limit(queryOptions.Limit).Offset(queryOptions.Offset) + } + return wrappedResourceModels, queryBuilder.Find(&wrappedResourceModels).Error + } else { + if queryOptions.Limit > 0 { + queryBuilder = queryBuilder.Limit(queryOptions.Limit).Offset(queryOptions.Offset) + } + //there is no FHIR Resource name specified, so we're querying across all FHIR resources + return gr.getResourcesFromAllTables(queryBuilder, queryParam) + } +} + +// TODO: should this be deprecated? (replaced by ListResources) +func (gr *GormRepository) GetResourceByResourceTypeAndId(ctx context.Context, sourceResourceType string, sourceResourceId string) (*models.ResourceBase, error) { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return nil, currentUserErr + } + + tableName, err := databaseModel.GetTableNameByResourceType(sourceResourceType) + if err != nil { + return nil, err + } + + queryParam := models.OriginBase{ + UserID: currentUser.ID, + SourceResourceType: sourceResourceType, + SourceResourceID: sourceResourceId, + } + + var wrappedResourceModel models.ResourceBase + results := gr.GormClient.WithContext(ctx). + Where(queryParam). + Table(tableName). 
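+ // fetch the single matching resource from its type-specific table (gorm.ErrRecordNotFound if absent)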
+ First(&wrappedResourceModel) + + return &wrappedResourceModel, results.Error +} + +// we need to figure out how to get the source resource type from the source resource id, or if we're searching across every table :( +func (gr *GormRepository) GetResourceBySourceId(ctx context.Context, sourceId string, sourceResourceId string) (*models.ResourceBase, error) { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return nil, currentUserErr + } + + sourceIdUUID, err := uuid.Parse(sourceId) + if err != nil { + return nil, err + } + + queryParam := models.OriginBase{ + UserID: currentUser.ID, + SourceID: sourceIdUUID, + SourceResourceID: sourceResourceId, + } + + //there is no FHIR Resource name specified, so we're querying across all FHIR resources + wrappedResourceModels, err := gr.getResourcesFromAllTables(gr.GormClient.WithContext(ctx), queryParam) + if len(wrappedResourceModels) > 0 { + return &wrappedResourceModels[0], err + } else { + return nil, fmt.Errorf("no resource found with source id %s and source resource id %s", sourceId, sourceResourceId) + } +} + +// Get the patient for each source (for the current user) +func (gr *GormRepository) GetPatientForSources(ctx context.Context) ([]models.ResourceBase, error) { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return nil, currentUserErr + } + + //SELECT * FROM resource_bases WHERE user_id = "" and source_resource_type = "Patient" GROUP BY source_id + + tableName, err := databaseModel.GetTableNameByResourceType("Patient") + if err != nil { + return nil, err + } + + var wrappedResourceModels []models.ResourceBase + results := gr.GormClient.WithContext(ctx). + //Group("source_id"). //broken in Postgres. + Where(models.OriginBase{ + UserID: currentUser.ID, + SourceResourceType: "Patient", + }). + Table(tableName). 
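+ // with the GROUP BY disabled, this returns every Patient resource the user owns (one or more per source)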
+ Find(&wrappedResourceModels) + + return wrappedResourceModels, results.Error +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// Resource Associations +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +// verifyAssociationPermission ensure that the sources are "owned" by the same user, and that the user is the current user +func (gr *GormRepository) verifyAssociationPermission(ctx context.Context, sourceUserID uuid.UUID, relatedSourceUserID uuid.UUID) error { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return currentUserErr + } + if sourceUserID != relatedSourceUserID { + return fmt.Errorf("user id's must match when adding associations") + } else if sourceUserID != currentUser.ID { + return fmt.Errorf("user id's must match current user") + } + + return nil +} + +func (gr *GormRepository) AddResourceAssociation(ctx context.Context, source *models.SourceCredential, resourceType string, resourceId string, relatedSource *models.SourceCredential, relatedResourceType string, relatedResourceId string) error { + //ensure that the sources are "owned" by the same user + err := gr.verifyAssociationPermission(ctx, source.UserID, relatedSource.UserID) + if err != nil { + return err + } + + err = gr.GormClient.WithContext(ctx).Table("related_resources").Create(map[string]interface{}{ + "resource_base_user_id": source.UserID, + "resource_base_source_id": source.ID, + "resource_base_source_resource_type": resourceType, + "resource_base_source_resource_id": resourceId, + "related_resource_user_id": relatedSource.UserID, + "related_resource_source_id": relatedSource.ID, + "related_resource_source_resource_type": relatedResourceType, + "related_resource_source_resource_id": relatedResourceId, + }).Error + uniqueConstraintError := errors.New("constraint failed: UNIQUE constraint failed") + if err != nil { + if strings.HasPrefix(err.Error(), uniqueConstraintError.Error()) { + gr.Logger.Warnf("Ignoring an error when creating a related_resource association for %s/%s: %v", resourceType, resourceId, err) + //we can safely ignore this error + return nil + } + } + return err +} + +func (gr *GormRepository) RemoveResourceAssociation(ctx context.Context, source *models.SourceCredential, resourceType string, resourceId string, relatedSource *models.SourceCredential, relatedResourceType string, relatedResourceId string) error { + //ensure that the sources are "owned" by the same user + err := gr.verifyAssociationPermission(ctx, source.UserID, relatedSource.UserID) + if err != nil { + return err + } + + //manually delete association + results := gr.GormClient.WithContext(ctx). + //Table("related_resources"). 
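+ // delete the association row identified by the full composite key below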
+ Delete(&models.RelatedResource{}, map[string]interface{}{ + "resource_base_user_id": source.UserID, + "resource_base_source_id": source.ID, + "resource_base_source_resource_type": resourceType, + "resource_base_source_resource_id": resourceId, + "related_resource_user_id": relatedSource.UserID, + "related_resource_source_id": relatedSource.ID, + "related_resource_source_resource_type": relatedResourceType, + "related_resource_source_resource_id": relatedResourceId, + }) + + if results.Error != nil { + return results.Error + } else if results.RowsAffected == 0 { + return fmt.Errorf("no association found for %s/%s and %s/%s", resourceType, resourceId, relatedResourceType, relatedResourceId) + } + return nil +} + +func (gr *GormRepository) FindResourceAssociationsByTypeAndId(ctx context.Context, source *models.SourceCredential, resourceType string, resourceId string) ([]models.RelatedResource, error) { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return nil, currentUserErr + } + + if source.UserID != currentUser.ID { + return nil, fmt.Errorf("source credential must match the current user id") + } + + // SELECT * FROM related_resources WHERE user_id = "53c1e930-63af-46c9-b760-8e83cbc1abd9"; + var relatedResources []models.RelatedResource + result := gr.GormClient.WithContext(ctx). + Where(models.RelatedResource{ + ResourceBaseUserID: currentUser.ID, + ResourceBaseSourceID: source.ID, + ResourceBaseSourceResourceType: resourceType, + ResourceBaseSourceResourceID: resourceId, + }). + Find(&relatedResources) + return relatedResources, result.Error +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// Resource Composition (Grouping) +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +// AddResourceComposition +// this will group resources together into a "Composition" -- primarily to group related Encounters & Conditions into one semantic root. +// algorithm: +// - find source for each resource +// - (SECURITY) ensure the current user and the source for each resource matches +// - check if there is a Composition resource Type already. +// - if Composition type already exists: +// - update "relatesTo" field with additional data. +// - else: +// - Create a Composition resource type (populated with "relatesTo" references to all provided Resources) +// +// - add AddResourceAssociation for all resources linked to the Composition resource +// - store the Composition resource +// TODO: determine if we should be using a List Resource instead of a Composition resource +func (gr *GormRepository) AddResourceComposition(ctx context.Context, compositionTitle string, resources []*models.ResourceBase) error { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return currentUserErr + } + + //generate placeholder source + placeholderSource := models.SourceCredential{UserID: currentUser.ID, SourceType: "manual", ModelBase: models.ModelBase{ID: uuid.MustParse("00000000-0000-0000-0000-000000000000")}} + + existingCompositionResources := []*models.ResourceBase{} + rawResourceLookupTable := map[string]*models.ResourceBase{} + + //find the source for each resource we'd like to merge. 
(for ownership verification) + sourceLookup := map[uuid.UUID]*models.SourceCredential{} + for _, resource := range resources { + if resource.SourceResourceType == pkg.FhirResourceTypeComposition { + //skip, Composition resources don't have a valid SourceCredential + existingCompositionResources = append(existingCompositionResources, resource) + + //compositions may include existing resources, make sure we handle these + for _, related := range resource.RelatedResource { + rawResourceLookupTable[fmt.Sprintf("%s/%s", related.SourceResourceType, related.SourceResourceID)] = related + } + continue + } + + if _, sourceOk := sourceLookup[resource.SourceID]; !sourceOk { + //source has not been added yet, lets query for it. + sourceCred, err := gr.GetSource(ctx, resource.SourceID.String()) + if err != nil { + return fmt.Errorf("could not find source %s", resource.SourceID.String()) + } + sourceLookup[resource.SourceID] = sourceCred + } + + rawResourceLookupTable[fmt.Sprintf("%s/%s", resource.SourceResourceType, resource.SourceResourceID)] = resource + } + + // SECURITY: ensure the current user and the source for each resource matches + for _, source := range sourceLookup { + if source.UserID != currentUser.ID { + return fmt.Errorf("source must be owned by the current user: %s vs %s", source.UserID, currentUser.ID) + } + } + + // - check if there is a Composition resource Type already. + var compositionResource *models.ResourceBase + + if len(existingCompositionResources) > 0 { + //- if Composition type already exists in this set + // - update "relatesTo" field with additional data. + compositionResource = existingCompositionResources[0] + + //disassociate all existing remaining composition resources. + for _, existingCompositionResource := range existingCompositionResources[1:] { + for _, relatedResource := range existingCompositionResource.RelatedResource { + if err := gr.RemoveResourceAssociation( + ctx, + &placeholderSource, + existingCompositionResource.SourceResourceType, + existingCompositionResource.SourceResourceID, + sourceLookup[relatedResource.SourceID], + relatedResource.SourceResourceType, + relatedResource.SourceResourceID, + ); err != nil { + //ignoring errors, could be due to duplicate edges + return fmt.Errorf("an error occurred while removing resource association: %v", err) + } + } + + //remove this resource + compositionTable, err := databaseModel.GetTableNameByResourceType("Composition") + if err != nil { + return fmt.Errorf("an error occurred while finding Composition resource table: %v", err) + } + //TODO: we may need to delete with using the FhirComposition struct type + deleteResult := gr.GormClient.WithContext(ctx). + Table(compositionTable). 
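+ // remove the now-redundant Composition row; its associations were detached above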
+ Delete(existingCompositionResource) + if deleteResult.Error != nil { + return fmt.Errorf("an error occurred while removing Composition resource(%s/%s): %v", existingCompositionResource.SourceResourceType, existingCompositionResource.SourceID, err) + } else if deleteResult.RowsAffected != 1 { + return fmt.Errorf("composition resource was not deleted %s/%s", existingCompositionResource.SourceResourceType, existingCompositionResource.SourceID) + } + } + + } else { + //- else: + // - Create a Composition resource type (populated with "relatesTo" references to all provided Resources) + compositionResource = &models.ResourceBase{ + OriginBase: models.OriginBase{ + UserID: placeholderSource.UserID, // + SourceID: placeholderSource.ID, //Empty SourceID expected ("0000-0000-0000-0000") + SourceResourceType: pkg.FhirResourceTypeComposition, + SourceResourceID: uuid.New().String(), + }, + } + } + + // - Generate an "updated" RawResource json blob + rawCompositionResource := models.ResourceComposition{ + Title: compositionTitle, + RelatesTo: []models.ResourceCompositionRelatesTo{}, + } + + for relatedResourceKey, _ := range rawResourceLookupTable { + rawCompositionResource.RelatesTo = append(rawCompositionResource.RelatesTo, models.ResourceCompositionRelatesTo{ + Target: models.ResourceCompositionRelatesToTarget{ + TargetReference: models.ResourceCompositionRelatesToTargetReference{ + Reference: relatedResourceKey, + }, + }, + }) + } + + rawResourceJson, err := json.Marshal(rawCompositionResource) + if err != nil { + return err + } + compositionResource.ResourceRaw = rawResourceJson + + compositionResource.SortTitle = &compositionTitle + compositionResource.RelatedResource = utils.SortResourcePtrListByDate(resources) + compositionResource.SortDate = compositionResource.RelatedResource[0].SortDate + + //store the Composition resource + _, err = gr.UpsertResource(ctx, compositionResource) + if err != nil { + return err + } + + // - add AddResourceAssociation for all resources linked to the Composition resource + for _, resource := range rawResourceLookupTable { + if err := gr.AddResourceAssociation( + ctx, + &placeholderSource, + compositionResource.SourceResourceType, + compositionResource.SourceResourceID, + sourceLookup[resource.SourceID], + resource.SourceResourceType, + resource.SourceResourceID, + ); err != nil { + return err + } + } + + return nil +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// SourceCredential +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +func (gr *GormRepository) CreateSource(ctx context.Context, sourceCreds *models.SourceCredential) error { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return currentUserErr + } + sourceCreds.UserID = currentUser.ID + + //Assign will **always** update the source credential in the DB with data passed into this function. + return gr.GormClient.WithContext(ctx). + Where(models.SourceCredential{ + UserID: sourceCreds.UserID, + SourceType: sourceCreds.SourceType, + Patient: sourceCreds.Patient}). 
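+ // upsert keyed on (user, source type, patient): Assign forces the passed-in values whether the row is created or already exists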
+ Assign(*sourceCreds).FirstOrCreate(sourceCreds).Error +} + +func (gr *GormRepository) UpdateSource(ctx context.Context, sourceCreds *models.SourceCredential) error { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return currentUserErr + } + sourceCreds.UserID = currentUser.ID + + //Assign will **always** update the source credential in the DB with data passed into this function. + return gr.GormClient.WithContext(ctx). + Where(models.SourceCredential{ + ModelBase: models.ModelBase{ID: sourceCreds.ID}, + UserID: sourceCreds.UserID, + SourceType: sourceCreds.SourceType, + }).Updates(models.SourceCredential{ + AccessToken: sourceCreds.AccessToken, + RefreshToken: sourceCreds.RefreshToken, + ExpiresAt: sourceCreds.ExpiresAt, + DynamicClientId: sourceCreds.DynamicClientId, + DynamicClientRegistrationMode: sourceCreds.DynamicClientRegistrationMode, + DynamicClientJWKS: sourceCreds.DynamicClientJWKS, + LatestBackgroundJobID: sourceCreds.LatestBackgroundJobID, + }).Error +} + +func (gr *GormRepository) GetSource(ctx context.Context, sourceId string) (*models.SourceCredential, error) { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return nil, currentUserErr + } + + sourceUUID, err := uuid.Parse(sourceId) + if err != nil { + return nil, err + } + + var sourceCred models.SourceCredential + results := gr.GormClient.WithContext(ctx). + Where(models.SourceCredential{UserID: currentUser.ID, ModelBase: models.ModelBase{ID: sourceUUID}}). + Preload("LatestBackgroundJob"). + First(&sourceCred) + + return &sourceCred, results.Error +} + +func (gr *GormRepository) GetSourceSummary(ctx context.Context, sourceId string) (*models.SourceSummary, error) { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return nil, currentUserErr + } + + sourceUUID, err := uuid.Parse(sourceId) + if err != nil { + return nil, err + } + + sourceSummary := &models.SourceSummary{} + + source, err := gr.GetSource(ctx, sourceId) + if err != nil { + return nil, err + } + sourceSummary.Source = source + + //group by resource type and return counts + // SELECT source_resource_type as resource_type, COUNT(*) as count FROM resource_bases WHERE source_id = "53c1e930-63af-46c9-b760-8e83cbc1abd9" GROUP BY source_resource_type; + + var resourceTypeCounts []map[string]interface{} + + resourceTypes := databaseModel.GetAllowedResourceTypes() + for _, resourceType := range resourceTypes { + tableName, err := databaseModel.GetTableNameByResourceType(resourceType) + if err != nil { + return nil, err + } + var count int64 + result := gr.GormClient.WithContext(ctx). + Table(tableName). + Where(models.OriginBase{ + UserID: currentUser.ID, + SourceID: sourceUUID, + }). + Count(&count) + if result.Error != nil { + return nil, result.Error + } + if count == 0 { + continue //don't add resource counts if the count is 0 + } + resourceTypeCounts = append(resourceTypeCounts, map[string]interface{}{ + "source_id": sourceId, + "resource_type": resourceType, + "count": count, + }) + } + + sourceSummary.ResourceTypeCounts = resourceTypeCounts + + //set patient + patientTableName, err := databaseModel.GetTableNameByResourceType("Patient") + if err != nil { + return nil, err + } + var wrappedPatientResourceModel models.ResourceBase + patientResults := gr.GormClient.WithContext(ctx). + Where(models.OriginBase{ + UserID: currentUser.ID, + SourceResourceType: "Patient", + SourceID: sourceUUID, + }). + Table(patientTableName). 
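+ // load the Patient resource attached to this source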
+ First(&wrappedPatientResourceModel) + + if patientResults.Error != nil { + return nil, patientResults.Error + } + sourceSummary.Patient = &wrappedPatientResourceModel + + return sourceSummary, nil +} + +func (gr *GormRepository) GetSources(ctx context.Context) ([]models.SourceCredential, error) { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return nil, currentUserErr + } + + var sourceCreds []models.SourceCredential + results := gr.GormClient.WithContext(ctx). + Where(models.SourceCredential{UserID: currentUser.ID}). + Preload("LatestBackgroundJob"). + Find(&sourceCreds) + + return sourceCreds, results.Error +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// Background Job +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +func (gr *GormRepository) CreateBackgroundJob(ctx context.Context, backgroundJob *models.BackgroundJob) error { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return currentUserErr + } + + backgroundJob.UserID = currentUser.ID + + record := gr.GormClient.Create(backgroundJob) + return record.Error +} + +func (gr *GormRepository) GetBackgroundJob(ctx context.Context, backgroundJobId string) (*models.BackgroundJob, error) { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return nil, currentUserErr + } + + backgroundJobUUID, err := uuid.Parse(backgroundJobId) + if err != nil { + return nil, err + } + + var backgroundJob models.BackgroundJob + results := gr.GormClient.WithContext(ctx). + Where(models.SourceCredential{UserID: currentUser.ID, ModelBase: models.ModelBase{ID: backgroundJobUUID}}). + First(&backgroundJob) + + return &backgroundJob, results.Error +} + +func (gr *GormRepository) UpdateBackgroundJob(ctx context.Context, backgroundJob *models.BackgroundJob) error { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return currentUserErr + } + backgroundJob.UserID = currentUser.ID + + return gr.GormClient.WithContext(ctx). + Where(models.BackgroundJob{ + ModelBase: models.ModelBase{ID: backgroundJob.ID}, + UserID: backgroundJob.UserID, + }).Updates(models.BackgroundJob{ + JobStatus: backgroundJob.JobStatus, + Data: backgroundJob.Data, + LockedTime: backgroundJob.LockedTime, + DoneTime: backgroundJob.DoneTime, + Retries: backgroundJob.Retries, + Schedule: backgroundJob.Schedule, + }).Error +} + +func (gr *GormRepository) ListBackgroundJobs(ctx context.Context, queryOptions models.BackgroundJobQueryOptions) ([]models.BackgroundJob, error) { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return nil, currentUserErr + } + + queryParam := models.BackgroundJob{ + UserID: currentUser.ID, + } + + if queryOptions.JobType != nil { + queryParam.JobType = *queryOptions.JobType + } + if queryOptions.Status != nil { + queryParam.JobStatus = *queryOptions.Status + } + + var backgroundJobs []models.BackgroundJob + query := gr.GormClient.WithContext(ctx). + //Group("source_id"). //broken in Postgres. 
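+ // most recently locked jobs first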
+ Where(queryParam).Limit(queryOptions.Limit).Order("locked_time DESC") + + if queryOptions.Offset > 0 { + query = query.Offset(queryOptions.Offset) + } + + return backgroundJobs, query.Find(&backgroundJobs).Error +} + +func (gr *GormRepository) BackgroundJobCheckpoint(ctx context.Context, checkpointData map[string]interface{}, errorData map[string]interface{}) { + gr.Logger.Info("begin checkpointing background job...") + if len(checkpointData) == 0 && len(errorData) == 0 { + gr.Logger.Info("no changes detected. Skipping checkpoint") + return //nothing to do + } + defer gr.Logger.Info("end checkpointing background job") + + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + gr.Logger.Warning("could not find current user info context. Ignoring checkpoint", currentUserErr) + return + } + + //make sure we do an atomic update + backgroundJobId, ok := ctx.Value(pkg.ContextKeyTypeBackgroundJobID).(string) + if !ok { + gr.Logger.Warning("could not find background job id in context. Ignoring checkpoint") + return + } + backgroundJobUUID, err := uuid.Parse(backgroundJobId) + if err != nil { + gr.Logger.Warning("could not parse background job id. Ignoring checkpoint", err) + return + } + //https://gorm.io/docs/advanced_query.html#Locking-FOR-UPDATE + //TODO: if using another database type (not SQLITE) we need to make sure we use the correct locking strategy + //This is not a problem in SQLITE because it does database (or table) level locking by default + //var backgroundJob models.BackgroundJob + //gr.GormClient.Clauses(clause.Locking{Strength: "UPDATE"}).Find(&backgroundJob) + + txErr := gr.GormClient.Transaction(func(tx *gorm.DB) error { + //retrieve the background job by id + var backgroundJob models.BackgroundJob + backgroundJobFindResults := tx.WithContext(ctx). + Where(models.BackgroundJob{ + ModelBase: models.ModelBase{ID: backgroundJobUUID}, + UserID: currentUser.ID, + }). + First(&backgroundJob) + if backgroundJobFindResults.Error != nil { + return backgroundJobFindResults.Error + } + + //deserialize the job data + var backgroundJobSyncData models.BackgroundJobSyncData + if backgroundJob.Data != nil { + err := json.Unmarshal(backgroundJob.Data, &backgroundJobSyncData) + if err != nil { + return err + } + } + + //update the job data with new data provided by the calling functiion + changed := false + if len(checkpointData) > 0 { + backgroundJobSyncData.CheckpointData = checkpointData + changed = true + } + if len(errorData) > 0 { + backgroundJobSyncData.ErrorData = errorData + changed = true + } + + //define a background job with the fields we're going to update + now := time.Now() + updatedBackgroundJob := models.BackgroundJob{ + LockedTime: &now, + } + if changed { + serializedData, err := json.Marshal(backgroundJobSyncData) + if err != nil { + return err + } + updatedBackgroundJob.Data = serializedData + + } + + return tx.WithContext(ctx). + Where(models.BackgroundJob{ + ModelBase: models.ModelBase{ID: backgroundJobUUID}, + UserID: currentUser.ID, + }).Updates(updatedBackgroundJob).Error + }) + + if txErr != nil { + gr.Logger.Warning("could not find or update background job. Ignoring checkpoint", txErr) + } + +} + +// when server restarts, we should unlock all locked jobs, and set their status to failed +// SECURITY: this is global, and effects all users. +func (gr *GormRepository) CancelAllLockedBackgroundJobsAndFail() error { + now := time.Now() + return gr.GormClient. + Where(models.BackgroundJob{JobStatus: pkg.BackgroundJobStatusLocked}). 
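+ // mark every locked job as failed and stamp DoneTime; note this applies across all users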
+ Updates(models.BackgroundJob{ + JobStatus: pkg.BackgroundJobStatusFailed, + DoneTime: &now, + }).Error + +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// Utilities +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +func sqlitePragmaString(pragmas map[string]string) string { + q := url.Values{} + for key, val := range pragmas { + q.Add("_pragma", fmt.Sprintf("%s=%s", key, val)) + } + + queryStr := q.Encode() + if len(queryStr) > 0 { + return "?" + queryStr + } + return "" +} + +// Internal function +// This function will return a list of resources from all FHIR tables in the database +// The query allows us to set the source id, source resource id, source resource type +// SECURITY: this function assumes the user has already been authenticated +// TODO: theres probably a more efficient way of doing this with GORM +func (gr *GormRepository) getResourcesFromAllTables(queryBuilder *gorm.DB, queryParam models.OriginBase) ([]models.ResourceBase, error) { + wrappedResourceModels := []models.ResourceBase{} + resourceTypes := databaseModel.GetAllowedResourceTypes() + for _, resourceType := range resourceTypes { + tableName, err := databaseModel.GetTableNameByResourceType(resourceType) + if err != nil { + return nil, err + } + var tempWrappedResourceModels []models.ResourceBase + results := queryBuilder. + Where(queryParam). + Table(tableName). + Find(&tempWrappedResourceModels) + if results.Error != nil { + return nil, results.Error + } + wrappedResourceModels = append(wrappedResourceModels, tempWrappedResourceModels...) + } + return wrappedResourceModels, nil +} diff --git a/backend/pkg/database/gorm_repository_graph.go b/backend/pkg/database/gorm_repository_graph.go new file mode 100644 index 00000000..1ee372d3 --- /dev/null +++ b/backend/pkg/database/gorm_repository_graph.go @@ -0,0 +1,504 @@ +package database + +import ( + "context" + "fmt" + "log" + "strings" + + "github.com/dominikbraun/graph" + "github.com/fastenhealth/fasten-onprem/backend/pkg" + "github.com/fastenhealth/fasten-onprem/backend/pkg/models" + databaseModel "github.com/fastenhealth/fasten-onprem/backend/pkg/models/database" + "github.com/fastenhealth/fasten-onprem/backend/pkg/utils" + "golang.org/x/exp/slices" +) + +type VertexResourcePlaceholder struct { + UserID string + SourceID string + ResourceID string + ResourceType string + RelatedResourcePlaceholder []*VertexResourcePlaceholder +} + +func (rp *VertexResourcePlaceholder) ID() string { + return resourceKeysVertexId(rp.SourceID, rp.ResourceType, rp.ResourceID) +} + +// Retrieve a list of all fhir resources (vertex), and a list of all associations (edge) +// Generate a graph +// return list of root nodes, and their flattened related resources. 
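+//
+// Illustrative call (a sketch, not part of this changeset; "repo" here is a hypothetical *GormRepository instance):
+//
+//	flattened, meta, err := repo.GetFlattenedResourceGraph(ctx, pkg.ResourceGraphTypeMedicalHistory, models.ResourceGraphOptions{Page: 0})
+//	if err == nil {
+//		// flattened["Condition"] maps to the root Conditions with RelatedResource populated and sorted by date;
+//		// meta.TotalElements is the number of root resources before pagination.
+//	}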
+func (gr *GormRepository) GetFlattenedResourceGraph(ctx context.Context, graphType pkg.ResourceGraphType, options models.ResourceGraphOptions) (map[string][]*models.ResourceBase, *models.ResourceGraphMetadata, error) { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return nil, nil, currentUserErr + } + + //initialize the graph results metadata + resourceGraphMetadata := models.ResourceGraphMetadata{ + TotalElements: 0, + PageSize: 20, //TODO: replace this with pkg.DefaultPageSize + Page: options.Page, + } + + // Get list of all (non-reciprocal) relationships + var relatedResourceRelationships []models.RelatedResource + + // SELECT * FROM related_resources WHERE user_id = "53c1e930-63af-46c9-b760-8e83cbc1abd9"; + result := gr.GormClient.WithContext(ctx). + Where(models.RelatedResource{ + ResourceBaseUserID: currentUser.ID, + }). + Find(&relatedResourceRelationships) + if result.Error != nil { + return nil, nil, result.Error + } + + //Generate Graph + // TODO optimization: eventually cache the graph in a database/storage, and update when new resources are added. + g := graph.New(resourceVertexId, graph.Directed(), graph.Acyclic(), graph.Rooted()) + + //// Get list of all resources TODO - REPLACED THIS + //wrappedResourceModels, err := gr.ListResources(ctx, models.ListResourceQueryOptions{}) + //if err != nil { + // return nil, err + //} + + //add vertices to the graph (must be done first) + //we don't want to request all resources from the database, so we will create a placeholder vertex for each resource. + //we will then use the vertex id to lookup the resource from the database. + //this is a bit of a hack, but it allows us to use the graph library without having to load all resources into memory. + + //create a placeholder vertex for each resource (ensuring uniqueness) + resourcePlaceholders := map[string]VertexResourcePlaceholder{} + for _, relationship := range relatedResourceRelationships { + + //create placeholders + fromResourcePlaceholder := VertexResourcePlaceholder{ + UserID: relationship.ResourceBaseUserID.String(), + SourceID: relationship.ResourceBaseSourceID.String(), + ResourceID: relationship.ResourceBaseSourceResourceID, + ResourceType: relationship.ResourceBaseSourceResourceType, + } + + toResourcePlaceholder := VertexResourcePlaceholder{ + UserID: relationship.RelatedResourceUserID.String(), + SourceID: relationship.RelatedResourceSourceID.String(), + ResourceID: relationship.RelatedResourceSourceResourceID, + ResourceType: relationship.RelatedResourceSourceResourceType, + } + + //add placeholders to map, if they don't already exist + if _, ok := resourcePlaceholders[fromResourcePlaceholder.ID()]; !ok { + resourcePlaceholders[fromResourcePlaceholder.ID()] = fromResourcePlaceholder + } + if _, ok := resourcePlaceholders[toResourcePlaceholder.ID()]; !ok { + resourcePlaceholders[toResourcePlaceholder.ID()] = toResourcePlaceholder + } + } + + for ndx, _ := range resourcePlaceholders { + resourcePlaceholder := resourcePlaceholders[ndx] + log.Printf("Adding vertex: %v", resourcePlaceholder.ID()) + err := g.AddVertex( + &resourcePlaceholder, + ) + if err != nil { + return nil, nil, fmt.Errorf("an error occurred while adding vertex: %v", err) + } + } + + //add recriprocial relationships (depending on the graph type) + relatedResourceRelationships = gr.PopulateGraphTypeReciprocalRelationships(graphType, relatedResourceRelationships) + + //add edges to graph + for _, relationship := range relatedResourceRelationships { + + err := g.AddEdge( 
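+ // edge direction: from the owning (base) resource to the resource it references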
+ resourceKeysVertexId(relationship.ResourceBaseSourceID.String(), relationship.ResourceBaseSourceResourceType, relationship.ResourceBaseSourceResourceID), + resourceKeysVertexId(relationship.RelatedResourceSourceID.String(), relationship.RelatedResourceSourceResourceType, relationship.RelatedResourceSourceResourceID), + ) + + if err != nil { + //this may occur because vertices may not exist + gr.Logger.Warnf("ignoring, an error occurred while adding edge: %v", err) + } + } + + //// simplify graph if possible. + //graph.TransitiveReduction(g) + + // AdjacencyMap computes and returns an adjacency map containing all vertices in the graph. + // + // There is an entry for each vertex, and each of those entries is another map whose keys are + // the hash values of the adjacent vertices. The value is an Edge instance that stores the + // source and target hash values (these are the same as the map keys) as well as edge metadata. + // map[string]map[string]Edge[string]{ + // "A": map[string]Edge[string]{ + // "B": {Source: "A", Target: "B"} + // "C": {Source: "A", Target: "C"} + // } + // } + adjacencyMap, err := g.AdjacencyMap() + if err != nil { + return nil, nil, fmt.Errorf("error while generating AdjacencyMap: %v", err) + } + + // For a directed graph, PredecessorMap is the complement of AdjacencyMap. This is because in a directed graph, only + // vertices joined by an outgoing edge are considered adjacent to the current vertex, whereas + // predecessors are the vertices joined by an ingoing edge. + // ie. "empty" verticies in this map are "root" nodes. + predecessorMap, err := g.PredecessorMap() + if err != nil { + return nil, nil, fmt.Errorf("error while generating PredecessorMap: %v", err) + } + + // Doing this in one massive function, because passing graph by reference is difficult due to generics. + + // Step 1: use predecessorMap to find all "root" resources (eg. MedicalHistory - encounters and conditions). store those nodes in their respective lists. + resourcePlaceholderListDictionary := map[string][]*VertexResourcePlaceholder{} + sources, _, sourceFlattenLevel := getSourcesAndSinksForGraphType(graphType) + + for vertexId, val := range predecessorMap { + + if len(val) != 0 { + //skip any nodes/verticies/resources that are not "root" + continue + } + + resourcePlaceholder, err := g.Vertex(vertexId) + if err != nil { + //could not find this vertex in graph, ignoring + log.Printf("could not find vertex in graph: %v", err) + continue + } + + //check if this "root" node (which has no predecessors) is a valid source type + foundSourceType := "" + foundSourceLevel := -1 + for ndx, sourceResourceTypes := range sources { + log.Printf("testing resourceType: %s", resourcePlaceholder.ResourceType) + + if slices.Contains(sourceResourceTypes, strings.ToLower(resourcePlaceholder.ResourceType)) { + foundSourceType = resourcePlaceholder.ResourceType + foundSourceLevel = ndx + break + } + } + + if foundSourceLevel == -1 { + continue //skip this resourcePlaceholder, it is not a valid source type + } + + if _, ok := resourcePlaceholderListDictionary[foundSourceType]; !ok { + resourcePlaceholderListDictionary[foundSourceType] = []*VertexResourcePlaceholder{} + } + + resourcePlaceholderListDictionary[foundSourceType] = append(resourcePlaceholderListDictionary[foundSourceType], resourcePlaceholder) + } + + // Step 2: now that we've created a relationship graph using placeholders, we need to determine which page of resources to return + // and look up the actual resources from the database. 
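+ // InflateResourceGraphAtPage loads the root resources, sorts them by date, and returns only the requested page.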
+ + resourceListDictionary, totalElements, err := gr.InflateResourceGraphAtPage(resourcePlaceholderListDictionary, options.Page) + if err != nil { + return nil, nil, fmt.Errorf("error while paginating & inflating resource graph: %v", err) + } + resourceGraphMetadata.TotalElements = totalElements + + // Step 3: define a function. When given a resource, should find all related resources, flatten the heirarchy and set the RelatedResourceFhir list + flattenRelatedResourcesFn := func(resource *models.ResourceBase) { + // this is a "root" encounter, which is not related to any condition, we should add it to the Unknown encounters list + vertexId := resourceVertexId(&VertexResourcePlaceholder{ + ResourceType: resource.SourceResourceType, + ResourceID: resource.SourceResourceID, + SourceID: resource.SourceID.String(), + UserID: resource.UserID.String(), + }) + gr.Logger.Debugf("populating resourcePlaceholder: %s", vertexId) + + resource.RelatedResource = []*models.ResourceBase{} + + //get all the resource placeholders associated with this node + //TODO: handle error? + graph.DFS(g, vertexId, func(relatedVertexId string) bool { + relatedResourcePlaceholder, _ := g.Vertex(relatedVertexId) + //skip the current resourcePlaceholder if it's referenced in this list. + //also skip the current resourcePlaceholder if its a Binary resourcePlaceholder (which is a special case) + if vertexId != resourceVertexId(relatedResourcePlaceholder) && relatedResourcePlaceholder.ResourceType != "Binary" { + relatedResource, err := gr.GetResourceByResourceTypeAndId(ctx, relatedResourcePlaceholder.ResourceType, relatedResourcePlaceholder.ResourceID) + if err != nil { + gr.Logger.Warnf("ignoring, cannot safely handle error which occurred while getting related resource: %v", err) + return true + } + resource.RelatedResource = append( + resource.RelatedResource, + relatedResource, + ) + } + return false + }) + } + + // Step 4: flatten resources (if needed) and sort them + for resourceType, _ := range resourceListDictionary { + sourceFlatten, sourceFlattenOk := sourceFlattenLevel[strings.ToLower(resourceType)] + + if sourceFlattenOk && sourceFlatten == true { + //if flatten is set to true, we want to flatten the graph. This is usually for non primary source types (eg. Encounter is a source type, but Condition is the primary source type) + + // Step 3: populate related resources for each encounter, flattened + for ndx, _ := range resourceListDictionary[resourceType] { + // this is a "root" encounter, which is not related to any condition, we should add it to the Unknown encounters list + flattenRelatedResourcesFn(resourceListDictionary[resourceType][ndx]) + + //sort all related resources (by date, desc) + resourceListDictionary[resourceType][ndx].RelatedResource = utils.SortResourcePtrListByDate(resourceListDictionary[resourceType][ndx].RelatedResource) + } + } else { + // if flatten is set to false, we want to preserve the top relationships in the graph heirarchy. This is usually for primary source types (eg. 
Condition is the primary source type) + // we want to ensure context is preserved, so we will flatten the graph futher down in the heirarchy + + // Step 4: find all encounters referenced by the root conditions, populate them, then add them to the condition as RelatedResourceFhir + for ndx, _ := range resourceListDictionary[resourceType] { + // this is a "root" condition, + + resourceListDictionary[resourceType][ndx].RelatedResource = []*models.ResourceBase{} + currentResource := resourceListDictionary[resourceType][ndx] + vertexId := resourceKeysVertexId(currentResource.SourceID.String(), currentResource.SourceResourceType, currentResource.SourceResourceID) + for relatedVertexId, _ := range adjacencyMap[vertexId] { + relatedResourcePlaceholder, _ := g.Vertex(relatedVertexId) + relatedResourceFhir, err := gr.GetResourceByResourceTypeAndId(ctx, relatedResourcePlaceholder.ResourceType, relatedResourcePlaceholder.ResourceID) + if err != nil { + gr.Logger.Warnf("ignoring, cannot safely handle error which occurred while getting related resource (flatten=false): %v", err) + continue + } + flattenRelatedResourcesFn(relatedResourceFhir) + resourceListDictionary[resourceType][ndx].RelatedResource = append(resourceListDictionary[resourceType][ndx].RelatedResource, relatedResourceFhir) + } + + //sort all related resources (by date, desc) + resourceListDictionary[resourceType][ndx].RelatedResource = utils.SortResourcePtrListByDate(resourceListDictionary[resourceType][ndx].RelatedResource) + } + } + + resourceListDictionary[resourceType] = utils.SortResourcePtrListByDate(resourceListDictionary[resourceType]) + } + + // Step 5: return the populated resource list dictionary + + return resourceListDictionary, &resourceGraphMetadata, nil +} + +// LoadResourceGraphAtPage - this function will take a dictionary of placeholder "sources" graph and load the actual resources from the database, for a specific page +// - first, it will load all the "source" resources (eg. Encounter, Condition, etc) +// - sort the root resources by date, desc +// - use the page number + page size to determine which root resources to return +// - return a dictionary of "source" resource lists +func (gr *GormRepository) InflateResourceGraphAtPage(resourcePlaceholderListDictionary map[string][]*VertexResourcePlaceholder, page int) (map[string][]*models.ResourceBase, int, error) { + totalElements := 0 + // Step 3a: since we cant calulate the sort order until the resources are loaded, we need to load all the root resources first. + + //TODO: maybe its more performant to query each resource by type/id/source, since they are indexed already? + rootWrappedResourceModels := []models.ResourceBase{} + for resourceType, _ := range resourcePlaceholderListDictionary { + // resourcePlaceholderListDictionary contains top level resource types (eg. Encounter, Condition, etc) + + selectList := [][]interface{}{} + for ndx, _ := range resourcePlaceholderListDictionary[resourceType] { + selectList = append(selectList, []interface{}{ + resourcePlaceholderListDictionary[resourceType][ndx].UserID, + resourcePlaceholderListDictionary[resourceType][ndx].SourceID, + resourcePlaceholderListDictionary[resourceType][ndx].ResourceType, + resourcePlaceholderListDictionary[resourceType][ndx].ResourceID, + }) + } + + tableName, err := databaseModel.GetTableNameByResourceType(resourceType) + if err != nil { + return nil, totalElements, err + } + var tableWrappedResourceModels []models.ResourceBase + gr.GormClient. 
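+ // batch-fetch every placeholder for this resource type using a composite-key IN clause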
+ Where("(user_id, source_id, source_resource_type, source_resource_id) IN ?", selectList). + Table(tableName). + Find(&tableWrappedResourceModels) + + //append these resources to the rootWrappedResourceModels list + rootWrappedResourceModels = append(rootWrappedResourceModels, tableWrappedResourceModels...) + } + + //sort + rootWrappedResourceModels = utils.SortResourceListByDate(rootWrappedResourceModels) + + //calculate total elements + totalElements = len(rootWrappedResourceModels) + + //paginate (by calculating window for the slice) + rootWrappedResourceModels = utils.PaginateResourceList(rootWrappedResourceModels, page, 20) //todo: replace size with pkg.ResourceListPageSize + + // Step 3b: now that we have the root resources, lets generate a dictionary of resource lists, keyed by resource type + resourceListDictionary := map[string][]*models.ResourceBase{} + for ndx, _ := range rootWrappedResourceModels { + resourceType := rootWrappedResourceModels[ndx].SourceResourceType + if _, ok := resourceListDictionary[resourceType]; !ok { + resourceListDictionary[resourceType] = []*models.ResourceBase{} + } + resourceListDictionary[resourceType] = append(resourceListDictionary[resourceType], &rootWrappedResourceModels[ndx]) + } + + // Step 4: return the populated resource list dictionary + return resourceListDictionary, totalElements, nil +} + +// We need to support the following types of graphs: +// - Medical History +// - AddressBook (contacts) +// - Medications +// - Billing Report +// edges are always "strongly connected", however "source" nodes (roots, like Condition or Encounter -- depending on ) are only one way. +// add an edge from every resource to its related resource. Keep in mind that FHIR resources may not contain reciprocal edges, so we ensure the graph is rooted by flipping any +// related resources that are "Condition" or "Encounter" +func (gr *GormRepository) PopulateGraphTypeReciprocalRelationships(graphType pkg.ResourceGraphType, relationships []models.RelatedResource) []models.RelatedResource { + reciprocalRelationships := []models.RelatedResource{} + + //prioritized lists of sources and sinks for the graph. We will use these to determine which resources are "root" nodes. + sources, sinks, _ := getSourcesAndSinksForGraphType(graphType) + + for _, relationship := range relationships { + + //calculate the + resourceAGraphSourceLevel := foundResourceGraphSource(relationship.ResourceBaseSourceResourceType, sources) + resourceBGraphSourceLevel := foundResourceGraphSource(relationship.RelatedResourceSourceResourceType, sources) + + resourceAGraphSinkLevel := foundResourceGraphSink(relationship.ResourceBaseSourceResourceType, sinks) + resourceBGraphSinkLevel := foundResourceGraphSink(relationship.RelatedResourceSourceResourceType, sinks) + + if resourceAGraphSourceLevel > -1 && resourceBGraphSourceLevel > -1 { + //handle the case where both resources are "sources" (eg. 
MedicalHistory - Condition or Encounter) + if resourceAGraphSourceLevel <= resourceBGraphSourceLevel { + //A is a higher priority than B, so we will add an edge from A to B + reciprocalRelationships = append(reciprocalRelationships, relationship) + } else { + //B is a higher priority than A, so we will add an edge from B to A (flipped relationship) + reciprocalRelationships = append(reciprocalRelationships, models.RelatedResource{ + ResourceBaseUserID: relationship.RelatedResourceUserID, + ResourceBaseSourceID: relationship.RelatedResourceSourceID, + ResourceBaseSourceResourceType: relationship.RelatedResourceSourceResourceType, + ResourceBaseSourceResourceID: relationship.RelatedResourceSourceResourceID, + RelatedResourceUserID: relationship.ResourceBaseUserID, + RelatedResourceSourceID: relationship.ResourceBaseSourceID, + RelatedResourceSourceResourceType: relationship.ResourceBaseSourceResourceType, + RelatedResourceSourceResourceID: relationship.ResourceBaseSourceResourceID, + }) + } + + } else if resourceAGraphSourceLevel > -1 || resourceBGraphSinkLevel > -1 { + //resource A is a Source, or resource B is a sink, normal A -> B relationship (edge) + reciprocalRelationships = append(reciprocalRelationships, relationship) + + } else if resourceBGraphSourceLevel > -1 || resourceAGraphSinkLevel > -1 { + //resource B is a Source, or resource A is a sink, create B -> A relationship (edge) + + reciprocalRelationships = append(reciprocalRelationships, models.RelatedResource{ + ResourceBaseUserID: relationship.RelatedResourceUserID, + ResourceBaseSourceID: relationship.RelatedResourceSourceID, + ResourceBaseSourceResourceType: relationship.RelatedResourceSourceResourceType, + ResourceBaseSourceResourceID: relationship.RelatedResourceSourceResourceID, + RelatedResourceUserID: relationship.ResourceBaseUserID, + RelatedResourceSourceID: relationship.ResourceBaseSourceID, + RelatedResourceSourceResourceType: relationship.ResourceBaseSourceResourceType, + RelatedResourceSourceResourceID: relationship.ResourceBaseSourceResourceID, + }) + + } else { + //this is a regular pair of resources, create reciprocal edges + + reciprocalRelationships = append(reciprocalRelationships, relationship) + + reciprocalRelationships = append(reciprocalRelationships, models.RelatedResource{ + ResourceBaseUserID: relationship.RelatedResourceUserID, + ResourceBaseSourceID: relationship.RelatedResourceSourceID, + ResourceBaseSourceResourceType: relationship.RelatedResourceSourceResourceType, + ResourceBaseSourceResourceID: relationship.RelatedResourceSourceResourceID, + RelatedResourceUserID: relationship.ResourceBaseUserID, + RelatedResourceSourceID: relationship.ResourceBaseSourceID, + RelatedResourceSourceResourceType: relationship.ResourceBaseSourceResourceType, + RelatedResourceSourceResourceID: relationship.ResourceBaseSourceResourceID, + }) + } + + } + + return reciprocalRelationships +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// Utilities +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +func getSourcesAndSinksForGraphType(graphType pkg.ResourceGraphType) ([][]string, [][]string, map[string]bool) { + var sources [][]string + var sinks [][]string + var sourceFlattenRelated map[string]bool + switch graphType { + case pkg.ResourceGraphTypeMedicalHistory: + sources = [][]string{ + {"condition", "composition"}, + {"encounter", "explanationofbenefit"}, + } + sinks = 
[][]string{ + {"location", "device", "organization", "practitioner", "medication", "patient", "coverage"}, //resources that are shared across multiple conditions + {"binary"}, + } + sourceFlattenRelated = map[string]bool{ + "encounter": true, + } + break + case pkg.ResourceGraphTypeAddressBook: + sources = [][]string{ + {"practitioner", "organization"}, + {"practitionerrole", "careteam", "location"}, + } + sinks = [][]string{ + {"condition", "composition", "explanationofbenefits"}, //resources that are shared across multiple practitioners + {"encounter", "medication", "patient"}, + } + sourceFlattenRelated = map[string]bool{} + } + return sources, sinks, sourceFlattenRelated +} + +// source resource types are resources that are at the root of the graph, nothing may reference them directly +// loop though the list of source resource types, and see if the checkResourceType is one of them +func foundResourceGraphSource(checkResourceType string, sourceResourceTypes [][]string) int { + found := -1 + for i, sourceResourceType := range sourceResourceTypes { + if slices.Contains(sourceResourceType, strings.ToLower(checkResourceType)) { + found = i + break + } + } + return found +} + +// sink resource types are the leaves of the graph, they must not reference anything else. (only be referenced) +func foundResourceGraphSink(checkResourceType string, sinkResourceTypes [][]string) int { + found := -1 + for i, sinkResourceType := range sinkResourceTypes { + if slices.Contains(sinkResourceType, strings.ToLower(checkResourceType)) { + found = i + break + } + } + return found +} + +// helper function for GetResourceGraph, creating a "hash" for the resource +func resourceVertexId(resourcePlaceholder *VertexResourcePlaceholder) string { + return resourceKeysVertexId(resourcePlaceholder.SourceID, resourcePlaceholder.ResourceType, resourcePlaceholder.ResourceID) +} +func resourceKeysVertexId(sourceId string, resourceType string, resourceId string) string { + return strings.ToLower(fmt.Sprintf("%s/%s/%s", sourceId, resourceType, resourceId)) +} diff --git a/backend/pkg/database/gorm_repository_query.go b/backend/pkg/database/gorm_repository_query.go new file mode 100644 index 00000000..caa846e9 --- /dev/null +++ b/backend/pkg/database/gorm_repository_query.go @@ -0,0 +1,684 @@ +package database + +import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + "github.com/fastenhealth/fasten-onprem/backend/pkg/models" + databaseModel "github.com/fastenhealth/fasten-onprem/backend/pkg/models/database" + "github.com/iancoleman/strcase" + "github.com/samber/lo" + "golang.org/x/exp/maps" + "golang.org/x/exp/slices" + "gorm.io/gorm" +) + +type SearchParameterType string + +const ( + //simple types + SearchParameterTypeNumber SearchParameterType = "number" + SearchParameterTypeDate SearchParameterType = "date" + SearchParameterTypeUri SearchParameterType = "uri" + SearchParameterTypeKeyword SearchParameterType = "keyword" //this is a literal/string primitive. 
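+
+	// Illustrative values for the simple types above, following the parsing rules further down in
+	// this file (parameter names are borrowed from the tests and are not prescriptive):
+	//   number:  probability=gt0.8         -> prefix "gt", value 0.8
+	//   date:    issueDate=ge2018-01-01    -> prefix "ge", value 2018-01-01 (UTC midnight)
+	//   uri:     url:below=http://acme.org -> matches columns LIKE 'http://acme.org%'
+	//   keyword: id=abc-123                -> exact equality on the column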
+ + //complex types + SearchParameterTypeString SearchParameterType = "string" + SearchParameterTypeToken SearchParameterType = "token" + SearchParameterTypeReference SearchParameterType = "reference" + SearchParameterTypeQuantity SearchParameterType = "quantity" + SearchParameterTypeComposite SearchParameterType = "composite" + SearchParameterTypeSpecial SearchParameterType = "special" +) + +const TABLE_ALIAS = "fhir" + +// Allows users to use SearchParameters to query resources +// Can generate simple or complex queries, depending on the SearchParameter type: +// +// eg. Simple +// +// eg. Complex +// SELECT fhir.* +// FROM fhir_observation as fhir, json_each(fhir.code) as codeJson +// WHERE ( +// +// (codeJson.value ->> '$.code' = "29463-7" AND codeJson.value ->> '$.system' = "http://loinc.org") +// OR (codeJson.value ->> '$.code' = "3141-9" AND codeJson.value ->> '$.system' = "http://loinc.org") +// OR (codeJson.value ->> '$.code' = "27113001" AND codeJson.value ->> '$.system' = "http://snomed.info/sct") +// +// ) +// AND (user_id = "6efcd7c5-3f29-4f0d-926d-a66ff68bbfc2") +// GROUP BY `fhir`.`id` +func (gr *GormRepository) QueryResources(ctx context.Context, query models.QueryResource) (interface{}, error) { + + sqlQuery, err := gr.sqlQueryResources(ctx, query) + if err != nil { + return nil, err + } + + if query.Aggregations != nil && (query.Aggregations.GroupBy != nil || query.Aggregations.CountBy != nil) { + results := []map[string]interface{}{} + clientResp := sqlQuery.Find(&results) + return results, clientResp.Error + + } else { + results := []models.ResourceBase{} + clientResp := sqlQuery.Find(&results) + return results, clientResp.Error + } + +} + +// see QueryResources +// this function has all the logic, but should only be called directly for testing +func (gr *GormRepository) sqlQueryResources(ctx context.Context, query models.QueryResource) (*gorm.DB, error) { + //todo, until we actually parse the select statement, we will just return all resources based on "from" + + //SECURITY: this is required to ensure that only valid resource types are queried (since it's controlled by the user) + if !slices.Contains(databaseModel.GetAllowedResourceTypes(), query.From) { + return nil, fmt.Errorf("invalid resource type %s", query.From) + } + + if queryValidate := query.Validate(); queryValidate != nil { + return nil, queryValidate + } + + //find the associated Gorm Model for this query + queryModel, err := databaseModel.NewFhirResourceModelByType(query.From) + if err != nil { + return nil, err + } + + //SECURITY: this would be unsafe as the user controls the query.From value, however we've validated it is a valid resource type above + fromClauses := []string{fmt.Sprintf("%s as %s", strcase.ToSnake("Fhir"+query.From), TABLE_ALIAS)} + whereClauses := []string{} + whereNamedParameters := map[string]interface{}{} + + //find the FHIR search types associated with each where clause. Any unknown parameters will be ignored. 
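+	// For example (an illustrative sketch, not exhaustive): a query such as
+	//   { "from": "Observation", "where": { "code": "29463-7,3141-9" } }
+	// produces one AND-level group whose comma-separated values are OR'd together:
+	//   ((codeJson.value ->> '$.code' = @code_0_0) OR (codeJson.value ->> '$.code' = @code_0_1))
+	// with @code_0_0 = "29463-7" and @code_0_1 = "3141-9" collected in whereNamedParameters.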
+ searchCodeToTypeLookup := queryModel.GetSearchParameters() + for searchParamCodeWithModifier, searchParamCodeValueOrValuesWithPrefix := range query.Where { + searchParameter, err := ProcessSearchParameter(searchParamCodeWithModifier, searchCodeToTypeLookup) + if err != nil { + return nil, err + } + + searchParameterValueOperatorTree, err := ProcessSearchParameterValueIntoOperatorTree(searchParameter, searchParamCodeValueOrValuesWithPrefix) + if err != nil { + return nil, err + } + + for ndxANDlevel, searchParameterValueOperatorAND := range searchParameterValueOperatorTree { + whereORClauses := []string{} + for ndxORlevel, searchParameterValueOperatorOR := range searchParameterValueOperatorAND { + whereORClause, clauseNamedParameters, err := SearchCodeToWhereClause(searchParameter, searchParameterValueOperatorOR, fmt.Sprintf("%d_%d", ndxANDlevel, ndxORlevel)) + if err != nil { + return nil, err + } + //add generated where clause to the list, and add the named parameters to the map of existing named parameters + whereORClauses = append(whereORClauses, whereORClause) + maps.Copy(whereNamedParameters, clauseNamedParameters) + } + whereClauses = append(whereClauses, fmt.Sprintf("(%s)", strings.Join(whereORClauses, " OR "))) + } + + fromClause, err := SearchCodeToFromClause(searchParameter) + if err != nil { + return nil, err + } + if len(fromClause) > 0 { + fromClauses = append(fromClauses, fromClause) + } + } + + //SECURITY: for safety, we will always add/override the current user_id to the where clause. This is to ensure that the user doesnt attempt to override this value in their own where clause + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return nil, currentUserErr + } + whereNamedParameters["user_id"] = currentUser.ID.String() + whereClauses = append(whereClauses, "(user_id = @user_id)") + + //defaults + selectClauses := []string{fmt.Sprintf("%s.*", TABLE_ALIAS)} + groupClause := fmt.Sprintf("%s.id", TABLE_ALIAS) + orderClause := fmt.Sprintf("%s.sort_date DESC", TABLE_ALIAS) + if query.Aggregations != nil { + + //Handle Aggregations + + if query.Aggregations.CountBy != nil { + //populate the group by and order by clause with the count by values + query.Aggregations.OrderBy = &models.QueryResourceAggregation{ + Field: "*", + Function: "count", + } + query.Aggregations.GroupBy = query.Aggregations.CountBy + + if query.Aggregations.GroupBy.Field == "*" { + //we need to get the count of all resources, so we need to remove the group by clause and replace it by + // `source_resource_type` which will be the same for all resources + query.Aggregations.GroupBy.Field = "source_resource_type" + } + } + + //process order by clause + if query.Aggregations.OrderBy != nil { + orderAsc := true //default to ascending, switch to desc if parameter is a date type. 
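+			// Illustrative outcomes of the OrderBy handling below (field names borrowed from the tests):
+			//   OrderBy{Field: "instantiatesUri"}            -> ORDER BY fhir.instantiatesUri ASC
+			//   OrderBy{Field: "sort_date", Function: "max"} -> ORDER BY max(fhir.sort_date) DESC (date type)
+			//   OrderBy{Field: "*", Function: "count"}       -> ORDER BY count(*) DESC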
+ if !(query.Aggregations.OrderBy.Field == "*") { + orderAggregationParam, err := ProcessAggregationParameter(*query.Aggregations.OrderBy, searchCodeToTypeLookup) + if err != nil { + return nil, err + } + orderAggregationFromClause, err := SearchCodeToFromClause(orderAggregationParam.SearchParameter) + if err != nil { + return nil, err + } + fromClauses = append(fromClauses, orderAggregationFromClause) + + //if the order by is a date type, we need to order by DESC (most recent first) + if orderAggregationParam.Type == SearchParameterTypeDate { + orderAsc = false + } + + orderClause = AggregationParameterToClause(orderAggregationParam) + if orderAsc { + orderClause = fmt.Sprintf("%s ASC", orderClause) + } else { + orderClause = fmt.Sprintf("%s DESC", orderClause) + } + } else { + orderClause = fmt.Sprintf("%s(%s) DESC", query.Aggregations.OrderBy.Function, query.Aggregations.OrderBy.Field) + } + } + + //process group by clause + if query.Aggregations.GroupBy != nil { + groupAggregationParam, err := ProcessAggregationParameter(*query.Aggregations.GroupBy, searchCodeToTypeLookup) + if err != nil { + return nil, err + } + groupAggregationFromClause, err := SearchCodeToFromClause(groupAggregationParam.SearchParameter) + if err != nil { + return nil, err + } + fromClauses = append(fromClauses, groupAggregationFromClause) + + groupClause = AggregationParameterToClause(groupAggregationParam) + selectClauses = []string{ + fmt.Sprintf("%s as %s", groupClause, "label"), + } + + if query.Aggregations.OrderBy == nil || query.Aggregations.OrderBy.Field == "*" { + selectClauses = append(selectClauses, fmt.Sprintf("%s as %s", "count(*)", "value")) + orderClause = fmt.Sprintf("%s DESC", "count(*)") + } else { + //use the orderBy aggregation as the value + orderAggregationParam, err := ProcessAggregationParameter(*query.Aggregations.OrderBy, searchCodeToTypeLookup) + if err != nil { + return nil, err + } + + orderSelectClause := AggregationParameterToClause(orderAggregationParam) + selectClauses = append(selectClauses, fmt.Sprintf("%s as %s", orderSelectClause, "value")) + } + + } + } + + //ensure Where and From clauses are unique + whereClauses = lo.Uniq(whereClauses) + whereClauses = lo.Compact(whereClauses) + fromClauses = lo.Uniq(fromClauses) + fromClauses = lo.Compact(fromClauses) + + sqlQuery := gr.GormClient.WithContext(ctx). + Select(strings.Join(selectClauses, ", ")). + Where(strings.Join(whereClauses, " AND "), whereNamedParameters). + Group(groupClause). + Order(orderClause). + Table(strings.Join(fromClauses, ", ")) + + //add limit and offset clauses if present + if query.Limit != nil { + sqlQuery = sqlQuery.Limit(*query.Limit) + } + if query.Offset != nil { + sqlQuery = sqlQuery.Offset(*query.Offset) + } + + return sqlQuery, nil +} + +/// INTERNAL functionality. 
These functions are exported for testing, but are not available in the Interface
+//TODO: don't export these, instead use casting to convert the interface to the GormRepository struct, then call these functions directly
+
+type SearchParameter struct {
+	Name     string
+	Type     SearchParameterType
+	Modifier string
+}
+
+type AggregationParameter struct {
+	SearchParameter
+	Function string //count, sum, avg, min, max, etc
+}
+
+// Lists in the SearchParameterValueOperatorTree are AND'd together, and items within each SearchParameterValueOperatorTree list are OR'd together
+// For example, in the following tree the two inner lists are AND'd together, while the items within each inner list are OR'd together:
+//
+//	{
+//		{SearchParameterValue{Value: "foo"}, SearchParameterValue{Value: "bar"}},
+//		{SearchParameterValue{Value: "baz"}},
+//	}
+//
+// This would result in the following SQL:
+//
+//	(value = "foo" OR value = "bar") AND (value = "baz")
+type SearchParameterValueOperatorTree [][]SearchParameterValue
+
+type SearchParameterValue struct {
+	Prefix          string
+	Value           interface{}
+	SecondaryValues map[string]interface{}
+}
+
+// SearchParameters are made up of parameter names and modifiers. For example, "name" and "name:exact" are both valid search parameters
+// This function will parse the searchCodeWithModifier and return the SearchParameter
+func ProcessSearchParameter(searchCodeWithModifier string, searchParamTypeLookup map[string]string) (SearchParameter, error) {
+	searchParameter := SearchParameter{}
+
+	//determine the searchCode and searchCodeModifier
+	//TODO: this is only applicable to string, token, reference and uri type (however unknown names & modifiers are ignored)
+	if searchCodeParts := strings.SplitN(searchCodeWithModifier, ":", 2); len(searchCodeParts) == 2 {
+		searchParameter.Name = searchCodeParts[0]
+		searchParameter.Modifier = searchCodeParts[1]
+	} else {
+		searchParameter.Name = searchCodeParts[0]
+		searchParameter.Modifier = ""
+	}
+
+	//next, determine the searchCodeType for this Resource (or throw an error if it is unknown)
+	searchParamTypeStr, searchParamTypeOk := searchParamTypeLookup[searchParameter.Name]
+	if !searchParamTypeOk {
+		return searchParameter, fmt.Errorf("unknown search parameter: %s", searchParameter.Name)
+	} else {
+		searchParameter.Type = SearchParameterType(searchParamTypeStr)
+	}
+
+	//if this is a token search parameter with a modifier, we need to throw an error
+	if searchParameter.Type == SearchParameterTypeToken && len(searchParameter.Modifier) > 0 {
+		return searchParameter, fmt.Errorf("token search parameter %s cannot have a modifier", searchParameter.Name)
+	}
+
+	return searchParameter, nil
+}
+
+// ProcessSearchParameterValueIntoOperatorTree searchParamCodeValueOrValuesWithPrefix may be a single string, or a list of strings
+// each string may itself be a concatenation of multiple values, separated by a comma
+// so we need to do three stages of processing:
+// 1. split the searchParamCodeValueOrValuesWithPrefix into a list of strings
+// 2. split each string into a list of values
+// 3. use the ProcessSearchParameterValue function to split each value into a list of prefixes and values
+// these are then stored in a multidimensional list of SearchParameterValueOperatorTree
+// the top-level lists are AND'd together, and the items within each list are OR'd together
+//
+// For example, searchParamCodeValueOrValuesWithPrefix may be:
+//
+//	"code": "29463-7,3141-9,27113001"
+//	"code": ["le29463-7", "gt3141-9", "27113001"]
+func ProcessSearchParameterValueIntoOperatorTree(searchParameter SearchParameter, searchParamCodeValueOrValuesWithPrefix interface{}) (SearchParameterValueOperatorTree, error) {
+
+	searchParamCodeValuesWithPrefix := []string{}
+	switch v := searchParamCodeValueOrValuesWithPrefix.(type) {
+	case string:
+		searchParamCodeValuesWithPrefix = append(searchParamCodeValuesWithPrefix, v)
+		break
+	case []string:
+		searchParamCodeValuesWithPrefix = v
+		break
+	default:
+		return nil, fmt.Errorf("invalid search parameter value type %T, must be a string or a list of strings (%s=%v)", v, searchParameter.Name, searchParamCodeValueOrValuesWithPrefix)
+	}
+
+	//generate a SearchParameterValueOperatorTree, because we may have multiple OR and AND operators for the same search parameter.
+	//i.e. (code = "foo" OR code = "bar") AND (code = "baz")
+	searchParamCodeValueOperatorTree := SearchParameterValueOperatorTree{}
+
+	//loop through each searchParamCodeValueWithPrefix, and split it into a list of values (comma separated)
+	for _, searchParamCodeValuesInANDClause := range searchParamCodeValuesWithPrefix {
+		searchParameterValuesOperatorOR := []SearchParameterValue{}
+		for _, searchParamCodeValueInORClause := range strings.Split(searchParamCodeValuesInANDClause, ",") {
+			searchParameterValue, err := ProcessSearchParameterValue(searchParameter, searchParamCodeValueInORClause)
+			if err != nil {
+				return nil, err
+			}
+			searchParameterValuesOperatorOR = append(searchParameterValuesOperatorOR, searchParameterValue)
+		}
+		searchParamCodeValueOperatorTree = append(searchParamCodeValueOperatorTree, searchParameterValuesOperatorOR)
+	}
+	return searchParamCodeValueOperatorTree, nil
+}
+
+// ProcessSearchParameterValue searchValueWithPrefix may or may not have a prefix which needs to be parsed
+// this function will parse the searchValueWithPrefix and return the SearchParameterValue
+// for example, "eq2018-01-01" would return a SearchParameterValue with a prefix of "eq" and a value of "2018-01-01"
+// and "2018-01-01" would return a SearchParameterValue with a value of "2018-01-01"
+//
+// some query types, like token, quantity and reference, have secondary values that need to be parsed
+// for example, code="http://loinc.org|29463-7" would return a SearchParameterValue with a value of "29463-7" and a secondary value of { "codeSystem": "http://loinc.org" }
+func ProcessSearchParameterValue(searchParameter SearchParameter, searchValueWithPrefix string) (SearchParameterValue, error) {
+	searchParameterValue := SearchParameterValue{
+		SecondaryValues: map[string]interface{}{},
+		Value:           searchValueWithPrefix,
+	}
+	if (searchParameter.Type == SearchParameterTypeString || searchParameter.Type == SearchParameterTypeUri || searchParameter.Type == SearchParameterTypeKeyword) && len(searchParameterValue.Value.(string)) == 0 {
+		return searchParameterValue, fmt.Errorf("invalid search parameter value: (%s=%s)", searchParameter.Name, searchParameterValue.Value)
+	}
+
+	//certain types (like number, date and quantity) have a prefix that needs to be parsed
+	if searchParameter.Type == SearchParameterTypeNumber || searchParameter.Type == SearchParameterTypeDate || searchParameter.Type == SearchParameterTypeQuantity {
+		//loop through all known/allowed prefixes, and determine if the searchValueWithPrefix starts with one of them
+		allowedPrefixes := []string{"eq", "ne", "gt", "lt", "ge", "le", "sa", "eb", "ap"}
+		for _, allowedPrefix := range allowedPrefixes {
+			if strings.HasPrefix(searchValueWithPrefix, allowedPrefix) {
+				searchParameterValue.Prefix = allowedPrefix
+				searchParameterValue.Value = strings.TrimPrefix(searchValueWithPrefix, allowedPrefix)
+				break
+			}
+		}
+	}
+
+	//certain types (like token, quantity, reference) have secondary query values that need to be parsed (delimited by "|")
+	if searchParameter.Type == SearchParameterTypeQuantity {
+		if searchParameterValueParts := strings.SplitN(searchParameterValue.Value.(string), "|", 3); len(searchParameterValueParts) == 1 {
+			searchParameterValue.Value = searchParameterValueParts[0]
+		} else if len(searchParameterValueParts) == 2 {
+			searchParameterValue.Value = searchParameterValueParts[0]
+			if len(searchParameterValueParts[1]) > 0 {
+				searchParameterValue.SecondaryValues[searchParameter.Name+"System"] = searchParameterValueParts[1]
+			}
+		} else if len(searchParameterValueParts) == 3 {
+			searchParameterValue.Value = searchParameterValueParts[0]
+			if len(searchParameterValueParts[1]) > 0 {
+				searchParameterValue.SecondaryValues[searchParameter.Name+"System"] = searchParameterValueParts[1]
+			}
+			if len(searchParameterValueParts[2]) > 0 {
+				searchParameterValue.SecondaryValues[searchParameter.Name+"Code"] = searchParameterValueParts[2]
+			}
+		}
+	} else if searchParameter.Type == SearchParameterTypeToken {
+		if searchParameterValueParts := strings.SplitN(searchParameterValue.Value.(string), "|", 2); len(searchParameterValueParts) == 1 {
+			searchParameterValue.Value = searchParameterValueParts[0] //this is a code
+			if len(searchParameterValue.Value.(string)) == 0 {
+				return searchParameterValue, fmt.Errorf("invalid search parameter value: (%s=%s)", searchParameter.Name, searchParameterValue.Value)
+			}
+		} else if len(searchParameterValueParts) == 2 {
+			//if there are 2 parts, the first is always the system and the second is always the code. Either one may be empty. If both are empty this is invalid.
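+			// Worked examples (mirroring https://hl7.org/fhir/r4/search.html#token):
+			//   "http://loinc.org|29463-7" -> system "http://loinc.org", code "29463-7"
+			//   "|29463-7"                 -> no system, code "29463-7"
+			//   "http://loinc.org|"        -> system "http://loinc.org", no code
+			//   "|"                        -> rejected below, since both parts are empty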
+ searchParameterValue.SecondaryValues[searchParameter.Name+"System"] = searchParameterValueParts[0] + searchParameterValue.Value = searchParameterValueParts[1] + if len(searchParameterValueParts[0]) == 0 && len(searchParameterValueParts[1]) == 0 { + return searchParameterValue, fmt.Errorf("invalid search parameter value: (%s=%s)", searchParameter.Name, searchParameterValue.Value) + } + } + } else if searchParameter.Type == SearchParameterTypeReference { + //todo + return searchParameterValue, fmt.Errorf("search parameter type not yet implemented: %s", searchParameter.Type) + } + + //certain types (Quantity and Number) need to be converted to Float64 + if searchParameter.Type == SearchParameterTypeQuantity || searchParameter.Type == SearchParameterTypeNumber { + if conv, err := strconv.ParseFloat(searchParameterValue.Value.(string), 64); err == nil { + searchParameterValue.Value = conv + } else { + return searchParameterValue, fmt.Errorf("invalid search parameter value (NaN): (%s=%s)", searchParameter.Name, searchParameterValue.Value) + } + } else if searchParameter.Type == SearchParameterTypeDate { + //other types (like date) need to be converted to a time.Time + if conv, err := time.Parse(time.RFC3339, searchParameterValue.Value.(string)); err == nil { + searchParameterValue.Value = conv + } else { + // fallback to parsing just a date (without time) + if conv, err := time.Parse("2006-01-02", searchParameterValue.Value.(string)); err == nil { + searchParameterValue.Value = conv + } else { + return searchParameterValue, fmt.Errorf("invalid search parameter value (invalid date): (%s=%s)", searchParameter.Name, searchParameterValue.Value) + } + } + } + return searchParameterValue, nil +} + +func NamedParameterWithSuffix(parameterName string, suffix string) string { + return fmt.Sprintf("%s_%s", parameterName, suffix) +} + +// SearchCodeToWhereClause converts a searchCode and searchCodeValue to a where clause and a map of named parameters +func SearchCodeToWhereClause(searchParam SearchParameter, searchParamValue SearchParameterValue, namedParameterSuffix string) (string, map[string]interface{}, error) { + + //add named parameters to the lookup map. 
Basically, this is a map of all the named parameters that will be used in the where clause we're generating + searchClauseNamedParams := map[string]interface{}{ + NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix): searchParamValue.Value, + } + for k, v := range searchParamValue.SecondaryValues { + searchClauseNamedParams[NamedParameterWithSuffix(k, namedParameterSuffix)] = v + } + + //parse the searchCode and searchCodeValue to determine the correct where clause + //////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + //SIMPLE SEARCH PARAMETERS + //////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + switch searchParam.Type { + case SearchParameterTypeNumber, SearchParameterTypeDate: + + if searchParamValue.Prefix == "" || searchParamValue.Prefix == "eq" { + return fmt.Sprintf("(%s = @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil + } else if searchParamValue.Prefix == "lt" || searchParamValue.Prefix == "eb" { + return fmt.Sprintf("(%s < @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil + } else if searchParamValue.Prefix == "le" { + return fmt.Sprintf("(%s <= @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil + } else if searchParamValue.Prefix == "gt" || searchParamValue.Prefix == "sa" { + return fmt.Sprintf("(%s > @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil + } else if searchParamValue.Prefix == "ge" { + return fmt.Sprintf("(%s >= @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil + } else if searchParamValue.Prefix == "ne" { + return fmt.Sprintf("(%s <> @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil + } else if searchParam.Modifier == "ap" { + return "", nil, fmt.Errorf("search modifier 'ap' not supported for search parameter type %s (%s=%s)", searchParam.Type, searchParam.Name, searchParamValue.Value) + } + + case SearchParameterTypeUri: + if searchParam.Modifier == "" { + return fmt.Sprintf("(%s = @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil + } else if searchParam.Modifier == "below" { + searchClauseNamedParams[NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)] = searchParamValue.Value.(string) + "%" // column starts with "http://example.com" + return fmt.Sprintf("(%s LIKE @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil + } else if searchParam.Modifier == "above" { + return "", nil, fmt.Errorf("search modifier 'above' not supported for search parameter type %s (%s=%s)", searchParam.Type, searchParam.Name, searchParamValue.Value) + } + //////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + //COMPLEX SEARCH PARAMETERS + //////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + case SearchParameterTypeString: + if searchParam.Modifier == "" { + searchClauseNamedParams[NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)] = 
searchParamValue.Value.(string) + "%" // "eve" matches "Eve" and "Evelyn" + return fmt.Sprintf("(%sJson.value LIKE @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil + } else if searchParam.Modifier == "exact" { + // "eve" matches "eve" (not "Eve" or "EVE") + return fmt.Sprintf("(%sJson.value = @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil + } else if searchParam.Modifier == "contains" { + searchClauseNamedParams[NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)] = "%" + searchParamValue.Value.(string) + "%" // "eve" matches "Eve", "Evelyn" and "Severine" + return fmt.Sprintf("(%sJson.value LIKE @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil + } + case SearchParameterTypeQuantity: + + //setup the clause + var clause string + if searchParamValue.Prefix == "" || searchParamValue.Prefix == "eq" { + //TODO: when no prefix is specified, we need to search using BETWEEN (+/- 0.05) + clause = fmt.Sprintf("%sJson.value ->> '$.value' = @%s", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)) + } else if searchParamValue.Prefix == "lt" || searchParamValue.Prefix == "eb" { + clause = fmt.Sprintf("%sJson.value ->> '$.value' < @%s", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)) + } else if searchParamValue.Prefix == "le" { + clause = fmt.Sprintf("%sJson.value ->> '$.value' <= @%s", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)) + } else if searchParamValue.Prefix == "gt" || searchParamValue.Prefix == "sa" { + clause = fmt.Sprintf("%sJson.value ->> '$.value' > @%s", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)) + } else if searchParamValue.Prefix == "ge" { + clause = fmt.Sprintf("%sJson.value ->> '$.value' >= @%s", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)) + } else if searchParamValue.Prefix == "ne" { + clause = fmt.Sprintf("%sJson.value ->> '$.value' <> @%s", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)) + } else if searchParamValue.Prefix == "ap" { + return "", nil, fmt.Errorf("search modifier 'ap' not supported for search parameter type %s (%s=%s)", searchParam.Type, searchParam.Name, searchParamValue.Value) + } + + //append the code and/or system clauses (if required) + //this looks like unnecessary code, however its required to ensure consistent tests + allowedSecondaryKeys := []string{"code", "system"} + + for _, k := range allowedSecondaryKeys { + namedParameterKey := fmt.Sprintf("%s%s", searchParam.Name, strings.Title(k)) + if _, ok := searchParamValue.SecondaryValues[namedParameterKey]; ok { + clause += fmt.Sprintf(` AND %sJson.value ->> '$.%s' = @%s`, searchParam.Name, k, NamedParameterWithSuffix(namedParameterKey, namedParameterSuffix)) + } + } + + return fmt.Sprintf("(%s)", clause), searchClauseNamedParams, nil + case SearchParameterTypeToken: + //unfortunately we don't know the datatype of this token, however, we're already preprocessed this field in backend/pkg/models/database/generate.go + // all of the following datatypes will be stored in a JSON object with the following structure: + // { + // "system": "http://example.com", + // "code": "example-code", + // "text": "example display" + // } + // primitive datatypes will not have a 
system or text, just a code (e.g. "code": true or "code": "http://www.example.com") + // + // - Coding - https://hl7.org/fhir/r4/datatypes.html#Coding + // - Identifier - https://hl7.org/fhir/r4/datatypes.html#Identifier + // - ContactPoint - https://hl7.org/fhir/r4/datatypes.html#ContactPoint + // - CodeableConcept - https://hl7.org/fhir/r4/datatypes.html#CodeableConcept + // - code - https://hl7.org/fhir/r4/datatypes.html#code + // - boolean - https://hl7.org/fhir/r4/datatypes.html#boolean + // - uri - https://hl7.org/fhir/r4/datatypes.html#uri + // - string - https://hl7.org/fhir/r4/datatypes.html#string + + //TODO: support ":text" modifier + + //setup the clause + clause := []string{} + if searchParamValue.Value.(string) != "" { + clause = append(clause, fmt.Sprintf("%sJson.value ->> '$.code' = @%s", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix))) + } + + //append the code and/or system clauses (if required) + //this looks like unnecessary code, however its required to ensure consistent tests + allowedSecondaryKeys := []string{"system"} + + for _, k := range allowedSecondaryKeys { + namedParameterKey := fmt.Sprintf("%s%s", searchParam.Name, strings.Title(k)) + if _, ok := searchParamValue.SecondaryValues[namedParameterKey]; ok { + clause = append(clause, fmt.Sprintf(`%sJson.value ->> '$.%s' = @%s`, searchParam.Name, k, NamedParameterWithSuffix(namedParameterKey, namedParameterSuffix))) + } + } + return fmt.Sprintf("(%s)", strings.Join(clause, " AND ")), searchClauseNamedParams, nil + + case SearchParameterTypeKeyword: + //setup the clause + return fmt.Sprintf("(%s = @%s)", searchParam.Name, NamedParameterWithSuffix(searchParam.Name, namedParameterSuffix)), searchClauseNamedParams, nil + case SearchParameterTypeReference: + return "", nil, fmt.Errorf("search parameter type %s not supported", searchParam.Type) + } + return "", searchClauseNamedParams, nil +} + +func SearchCodeToFromClause(searchParam SearchParameter) (string, error) { + //complex search parameters (e.g. token, reference, quantities, special) require the use of `json_*` FROM clauses + + //////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + //COMPLEX SEARCH PARAMETERS + //////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + switch searchParam.Type { + case SearchParameterTypeQuantity, SearchParameterTypeToken, SearchParameterTypeString: + //setup the clause + return fmt.Sprintf("json_each(%s.%s) as %sJson", TABLE_ALIAS, searchParam.Name, searchParam.Name), nil + } + return "", nil +} + +func AggregationParameterToClause(aggParameter AggregationParameter) string { + var clause string + + switch aggParameter.Type { + case SearchParameterTypeQuantity, SearchParameterTypeString: + //setup the clause + clause = fmt.Sprintf("(%sJson.value ->> '$.%s')", aggParameter.Name, aggParameter.Modifier) + case SearchParameterTypeToken: + //modifier is optional for token types. 
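+		// Illustrative examples of the clauses this switch generates (field names taken from the tests in this package):
+		//   token  "code:code"            -> (codeJson.value ->> '$.code')
+		//   token  "code" (no property)   -> ((codeJson.value ->> '$.system') || '|' || (codeJson.value ->> '$.code'))
+		//   string "valueString:value"    -> (valueStringJson.value ->> '$.value')
+		//   date   "sort_date" with "max" -> max(fhir.sort_date)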
+ if aggParameter.Modifier != "" { + clause = fmt.Sprintf("(%sJson.value ->> '$.%s')", aggParameter.Name, aggParameter.Modifier) + } else { + //if no modifier is specified, use the system and code to generate the clause + //((codeJson.value ->> '$.system') || '|' || (codeJson.value ->> '$.code')) + clause = fmt.Sprintf("((%sJson.value ->> '$.system') || '|' || (%sJson.value ->> '$.code'))", aggParameter.Name, aggParameter.Name) + } + + default: + clause = fmt.Sprintf("%s.%s", TABLE_ALIAS, aggParameter.Name) + } + + if len(aggParameter.Function) > 0 { + clause = fmt.Sprintf("%s(%s)", aggParameter.Function, clause) + } + return clause +} + +// ProcessAggregationParameter processes the aggregation parameters which are fields with optional properties: +// Fields that are primitive types (number, uri) must not have any property specified: +// eg. `probability` +// +// Fields that are complex types (token, quantity) must have a property specified: +// eg. `identifier:code` +// +// if the a property is specified, its set as the modifier, and used when generating the SQL query groupBy, orderBy, etc clause +func ProcessAggregationParameter(aggregationFieldWithFn models.QueryResourceAggregation, searchParamTypeLookup map[string]string) (AggregationParameter, error) { + aggregationParameter := AggregationParameter{ + SearchParameter: SearchParameter{}, + Function: aggregationFieldWithFn.Function, + } + + //determine the searchCode searchCodeModifier + //TODO: this is only applicable to string, token, reference and uri type (however unknown names & modifiers are ignored) + if aggregationFieldParts := strings.SplitN(aggregationFieldWithFn.Field, ":", 2); len(aggregationFieldParts) == 2 { + aggregationParameter.Name = aggregationFieldParts[0] + aggregationParameter.Modifier = aggregationFieldParts[1] + } else { + aggregationParameter.Name = aggregationFieldParts[0] + aggregationParameter.Modifier = "" + } + + //next, determine the searchCodeType for this Resource (or throw an error if it is unknown) + searchParamTypeStr, searchParamTypeOk := searchParamTypeLookup[aggregationParameter.Name] + if !searchParamTypeOk { + return aggregationParameter, fmt.Errorf("unknown search parameter in aggregation: %s", aggregationParameter.Name) + } else { + aggregationParameter.Type = SearchParameterType(searchParamTypeStr) + } + + //primitive types should not have a modifier, we need to throw an error + if aggregationParameter.Type == SearchParameterTypeNumber || aggregationParameter.Type == SearchParameterTypeUri || aggregationParameter.Type == SearchParameterTypeKeyword || aggregationParameter.Type == SearchParameterTypeDate { + if len(aggregationParameter.Modifier) > 0 { + return aggregationParameter, fmt.Errorf("primitive aggregation parameter %s cannot have a property (%s)", aggregationParameter.Name, aggregationParameter.Modifier) + } + } else if aggregationParameter.Type == SearchParameterTypeToken { + //modifier is optional for token types + } else { + //complex types must have a modifier + if len(aggregationParameter.Modifier) == 0 { + return aggregationParameter, fmt.Errorf("complex aggregation parameter %s must have a property", aggregationParameter.Name) + } + } + return aggregationParameter, nil +} diff --git a/backend/pkg/database/gorm_repository_query_sql_test.go b/backend/pkg/database/gorm_repository_query_sql_test.go new file mode 100644 index 00000000..85ae9dac --- /dev/null +++ b/backend/pkg/database/gorm_repository_query_sql_test.go @@ -0,0 +1,519 @@ +package database + +import ( + "context" + 
"fmt" + "io/ioutil" + "log" + "os" + "strings" + "testing" + + "github.com/fastenhealth/fasten-onprem/backend/pkg" + mock_config "github.com/fastenhealth/fasten-onprem/backend/pkg/config/mock" + "github.com/fastenhealth/fasten-onprem/backend/pkg/event_bus" + "github.com/fastenhealth/fasten-onprem/backend/pkg/models" + "github.com/golang/mock/gomock" + "github.com/sirupsen/logrus" + "github.com/stretchr/testify/require" + "github.com/stretchr/testify/suite" + "gorm.io/gorm" +) + +// Define the suite, and absorb the built-in basic suite +// functionality from testify - including a T() method which +// returns the current testing context +type RepositorySqlTestSuite struct { + suite.Suite + MockCtrl *gomock.Controller + TestDatabase *os.File + + TestRepository DatabaseRepository +} + +// BeforeTest has a function to be executed right before the test starts and receives the suite and test names as input +func (suite *RepositorySqlTestSuite) BeforeTest(suiteName, testName string) { + suite.MockCtrl = gomock.NewController(suite.T()) + + dbFile, err := ioutil.TempFile("", fmt.Sprintf("%s.*.db", testName)) + if err != nil { + log.Fatal(err) + } + suite.TestDatabase = dbFile + + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + suite.TestRepository = dbRepo + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = suite.TestRepository.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + +} + +// AfterTest has a function to be executed right after the test finishes and receives the suite and test names as input +func (suite *RepositorySqlTestSuite) AfterTest(suiteName, testName string) { + suite.MockCtrl.Finish() + os.Remove(suite.TestDatabase.Name()) +} + +// In order for 'go test' to run this suite, we need to create +// a normal test function and pass our suite to suite.Run +func TestRepositorySqlTestSuite(t *testing.T) { + suite.Run(t, new(RepositorySqlTestSuite)) + +} + +func (suite *RepositorySqlTestSuite) TestQueryResources_SQL() { + //setup + sqliteRepo := suite.TestRepository.(*GormRepository) + sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true}) + + //test + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{ + Select: []string{}, + Where: map[string]interface{}{ + "code": "test_code", + }, + From: "Observation", + }) + require.NoError(suite.T(), err) + var results []map[string]interface{} + statement := sqlQuery.Find(&results).Statement + sqlString := statement.SQL.String() + sqlParams := statement.Vars + + //assert + require.NoError(suite.T(), err) + require.Equal(suite.T(), + strings.Join([]string{ + "SELECT fhir.*", + "FROM fhir_observation as fhir, json_each(fhir.code) as codeJson", + "WHERE ((codeJson.value ->> '$.code' = ?)) AND (user_id = ?)", + "GROUP BY `fhir`.`id`", + "ORDER BY fhir.sort_date DESC", + }, " "), + sqlString) + require.Equal(suite.T(), sqlParams, []interface{}{ + "test_code", 
"00000000-0000-0000-0000-000000000000", + }) +} + +func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithMultipleWhereConditions() { + //setup + sqliteRepo := suite.TestRepository.(*GormRepository) + sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true}) + + //test + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{ + Select: []string{}, + Where: map[string]interface{}{ + "code": "test_code", + "category": "12345", + }, + From: "Observation", + }) + require.NoError(suite.T(), err) + var results []map[string]interface{} + statement := sqlQuery.Find(&results).Statement + sqlString := statement.SQL.String() + sqlParams := statement.Vars + + //assert + require.NoError(suite.T(), err) + require.Equal(suite.T(), + strings.Join([]string{ + "SELECT fhir.*", + "FROM fhir_observation as fhir, json_each(fhir.code) as codeJson, json_each(fhir.category) as categoryJson", + "WHERE ((codeJson.value ->> '$.code' = ?)) AND ((categoryJson.value ->> '$.code' = ?)) AND (user_id = ?)", + "GROUP BY `fhir`.`id`", + "ORDER BY fhir.sort_date DESC", + }, " "), + sqlString) + require.Equal(suite.T(), sqlParams, []interface{}{ + "test_code", "12345", "00000000-0000-0000-0000-000000000000", + }) +} + +func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithPrimitiveOrderByAggregation() { + //setup + sqliteRepo := suite.TestRepository.(*GormRepository) + sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true}) + + //test + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{ + Select: []string{}, + Where: map[string]interface{}{ + "activityCode": "test_code", + }, + From: "CarePlan", + Aggregations: &models.QueryResourceAggregations{OrderBy: &models.QueryResourceAggregation{Field: "instantiatesUri"}}, + }) + require.NoError(suite.T(), err) + var results []map[string]interface{} + statement := sqlQuery.Find(&results).Statement + sqlString := statement.SQL.String() + sqlParams := statement.Vars + + //assert + require.NoError(suite.T(), err) + require.Equal(suite.T(), + strings.Join([]string{ + "SELECT fhir.*", + "FROM fhir_care_plan as fhir, json_each(fhir.activityCode) as activityCodeJson", + "WHERE ((activityCodeJson.value ->> '$.code' = ?)) AND (user_id = ?)", + "GROUP BY `fhir`.`id`", + "ORDER BY fhir.instantiatesUri ASC", + }, " "), sqlString) + require.Equal(suite.T(), sqlParams, []interface{}{ + "test_code", "00000000-0000-0000-0000-000000000000", + }) +} + +func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithKeywordOrderByAggregation() { + //setup + sqliteRepo := suite.TestRepository.(*GormRepository) + sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true}) + + //test + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{ + Select: []string{}, + Where: map[string]interface{}{}, + From: "CarePlan", + Aggregations: &models.QueryResourceAggregations{OrderBy: &models.QueryResourceAggregation{Field: "id"}}, + }) + require.NoError(suite.T(), err) + var results []map[string]interface{} + statement := sqlQuery.Find(&results).Statement + sqlString := statement.SQL.String() + sqlParams := statement.Vars + + 
//assert + require.NoError(suite.T(), err) + require.Equal(suite.T(), + strings.Join([]string{ + "SELECT fhir.*", + "FROM fhir_care_plan as fhir", + "WHERE (user_id = ?)", + "GROUP BY `fhir`.`id`", + "ORDER BY fhir.id ASC", + }, " "), sqlString) + require.Equal(suite.T(), sqlParams, []interface{}{ + "00000000-0000-0000-0000-000000000000", + }) +} + +func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithComplexOrderByAggregation() { + //setup + sqliteRepo := suite.TestRepository.(*GormRepository) + sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true}) + + //test + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{ + Select: []string{}, + Where: map[string]interface{}{ + "code": "test_code", + }, + From: "Observation", + Aggregations: &models.QueryResourceAggregations{OrderBy: &models.QueryResourceAggregation{Field: "valueString:value"}}, + }) + require.NoError(suite.T(), err) + var results []map[string]interface{} + statement := sqlQuery.Find(&results).Statement + sqlString := statement.SQL.String() + sqlParams := statement.Vars + + //assert + require.NoError(suite.T(), err) + require.Equal(suite.T(), + strings.Join([]string{ + "SELECT fhir.*", + "FROM fhir_observation as fhir, json_each(fhir.code) as codeJson, json_each(fhir.valueString) as valueStringJson", + "WHERE ((codeJson.value ->> '$.code' = ?)) AND (user_id = ?)", + "GROUP BY `fhir`.`id`", + "ORDER BY (valueStringJson.value ->> '$.value') ASC", + }, " "), sqlString) + require.Equal(suite.T(), sqlParams, []interface{}{ + "test_code", "00000000-0000-0000-0000-000000000000", + }) +} + +func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithPrimitiveCountByAggregation() { + //setup + sqliteRepo := suite.TestRepository.(*GormRepository) + sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true}) + + //test + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{ + Select: []string{}, + Where: map[string]interface{}{ + "activityCode": "test_code", + }, + From: "CarePlan", + Aggregations: &models.QueryResourceAggregations{CountBy: &models.QueryResourceAggregation{Field: "instantiatesUri"}}, + }) + require.NoError(suite.T(), err) + var results []map[string]interface{} + statement := sqlQuery.Find(&results).Statement + sqlString := statement.SQL.String() + sqlParams := statement.Vars + + //assert + require.NoError(suite.T(), err) + require.Equal(suite.T(), + strings.Join([]string{ + "SELECT fhir.instantiatesUri as label, count(*) as value", + "FROM fhir_care_plan as fhir, json_each(fhir.activityCode) as activityCodeJson", + "WHERE ((activityCodeJson.value ->> '$.code' = ?)) AND (user_id = ?)", + "GROUP BY `fhir`.`instantiatesUri`", + "ORDER BY count(*) DESC", + }, " "), sqlString) + require.Equal(suite.T(), sqlParams, []interface{}{ + "test_code", "00000000-0000-0000-0000-000000000000", + }) +} + +func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithKeywordCountByAggregation() { + //setup + sqliteRepo := suite.TestRepository.(*GormRepository) + sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true}) + + //test + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + sqlQuery, err := 
sqliteRepo.sqlQueryResources(authContext, models.QueryResource{ + Select: []string{}, + Where: map[string]interface{}{ + "activityCode": "test_code", + }, + From: "CarePlan", + Aggregations: &models.QueryResourceAggregations{CountBy: &models.QueryResourceAggregation{Field: "source_resource_type"}}, + }) + require.NoError(suite.T(), err) + var results []map[string]interface{} + statement := sqlQuery.Find(&results).Statement + sqlString := statement.SQL.String() + sqlParams := statement.Vars + + //assert + require.NoError(suite.T(), err) + require.Equal(suite.T(), + strings.Join([]string{ + "SELECT fhir.source_resource_type as label, count(*) as value", + "FROM fhir_care_plan as fhir, json_each(fhir.activityCode) as activityCodeJson", + "WHERE ((activityCodeJson.value ->> '$.code' = ?)) AND (user_id = ?)", + "GROUP BY `fhir`.`source_resource_type`", + "ORDER BY count(*) DESC", + }, " "), sqlString) + require.Equal(suite.T(), sqlParams, []interface{}{ + "test_code", "00000000-0000-0000-0000-000000000000", + }) +} + +func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithWildcardCountByAggregation() { + //setup + sqliteRepo := suite.TestRepository.(*GormRepository) + sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true}) + + //test + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{ + Select: []string{}, + Where: map[string]interface{}{}, + From: "CarePlan", + Aggregations: &models.QueryResourceAggregations{CountBy: &models.QueryResourceAggregation{Field: "*"}}, + }) + require.NoError(suite.T(), err) + var results []map[string]interface{} + statement := sqlQuery.Find(&results).Statement + sqlString := statement.SQL.String() + sqlParams := statement.Vars + + //assert + require.NoError(suite.T(), err) + require.Equal(suite.T(), + strings.Join([]string{ + "SELECT fhir.source_resource_type as label, count(*) as value", + "FROM fhir_care_plan as fhir", + "WHERE (user_id = ?)", + "GROUP BY `fhir`.`source_resource_type`", + "ORDER BY count(*) DESC", + }, " "), sqlString) + require.Equal(suite.T(), sqlParams, []interface{}{ + "00000000-0000-0000-0000-000000000000", + }) +} + +func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithComplexCountByAggregation() { + //setup + sqliteRepo := suite.TestRepository.(*GormRepository) + sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true}) + + //test + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{ + Select: []string{}, + Where: map[string]interface{}{ + "code": "test_code", + }, + From: "Observation", + Aggregations: &models.QueryResourceAggregations{CountBy: &models.QueryResourceAggregation{Field: "code:code"}}, + }) + require.NoError(suite.T(), err) + var results []map[string]interface{} + statement := sqlQuery.Find(&results).Statement + sqlString := statement.SQL.String() + sqlParams := statement.Vars + + //assert + require.NoError(suite.T(), err) + require.Equal(suite.T(), + strings.Join([]string{ + "SELECT (codeJson.value ->> '$.code') as label, count(*) as value", + "FROM fhir_observation as fhir, json_each(fhir.code) as codeJson", + "WHERE ((codeJson.value ->> '$.code' = ?)) AND (user_id = ?)", + "GROUP BY (codeJson.value ->> '$.code')", + "ORDER BY count(*) DESC", + }, " "), sqlString) + 
require.Equal(suite.T(), sqlParams, []interface{}{ + "test_code", "00000000-0000-0000-0000-000000000000", + }) +} + +func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithComplexGroupByWithOrderByMaxFnAggregation() { + //setup + sqliteRepo := suite.TestRepository.(*GormRepository) + sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true}) + + //test + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{ + Select: []string{}, + Where: map[string]interface{}{ + "code": "test_code", + }, + From: "Observation", + Aggregations: &models.QueryResourceAggregations{ + GroupBy: &models.QueryResourceAggregation{Field: "code:code"}, + OrderBy: &models.QueryResourceAggregation{Field: "sort_date", Function: "max"}, + }, + }) + require.NoError(suite.T(), err) + var results []map[string]interface{} + statement := sqlQuery.Find(&results).Statement + sqlString := statement.SQL.String() + sqlParams := statement.Vars + + //assert + require.NoError(suite.T(), err) + require.Equal(suite.T(), + strings.Join([]string{ + "SELECT (codeJson.value ->> '$.code') as label, max(fhir.sort_date) as value", + "FROM fhir_observation as fhir, json_each(fhir.code) as codeJson", + "WHERE ((codeJson.value ->> '$.code' = ?)) AND (user_id = ?)", + "GROUP BY (codeJson.value ->> '$.code')", + "ORDER BY max(fhir.sort_date) DESC", + }, " "), sqlString) + require.Equal(suite.T(), sqlParams, []interface{}{ + "test_code", "00000000-0000-0000-0000-000000000000", + }) +} + +func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithTokenGroupByNoModifier() { + //setup + sqliteRepo := suite.TestRepository.(*GormRepository) + sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true}) + + //test + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{ + Select: []string{}, + Where: map[string]interface{}{}, + From: "Observation", + Aggregations: &models.QueryResourceAggregations{ + GroupBy: &models.QueryResourceAggregation{Field: "code"}, + }, + }) + require.NoError(suite.T(), err) + var results []map[string]interface{} + statement := sqlQuery.Find(&results).Statement + sqlString := statement.SQL.String() + sqlParams := statement.Vars + + //assert + require.NoError(suite.T(), err) + require.Equal(suite.T(), + strings.Join([]string{ + "SELECT ((codeJson.value ->> '$.system') || '|' || (codeJson.value ->> '$.code')) as label, count(*) as value", + "FROM fhir_observation as fhir, json_each(fhir.code) as codeJson", + "WHERE (user_id = ?)", + "GROUP BY ((codeJson.value ->> '$.system') || '|' || (codeJson.value ->> '$.code'))", + "ORDER BY count(*) DESC", + }, " "), sqlString) + require.Equal(suite.T(), sqlParams, []interface{}{ + "00000000-0000-0000-0000-000000000000", + }) +} + +func (suite *RepositorySqlTestSuite) TestQueryResources_SQL_WithTokenGroupByNoModifierWithLimit() { + //setup + sqliteRepo := suite.TestRepository.(*GormRepository) + sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true}) + + //test + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + limit := 10 + sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{ + Select: []string{}, + Where: map[string]interface{}{}, + From: "Observation", + 
Limit: &limit, + Aggregations: &models.QueryResourceAggregations{ + GroupBy: &models.QueryResourceAggregation{Field: "code"}, + }, + }) + require.NoError(suite.T(), err) + var results []map[string]interface{} + statement := sqlQuery.Find(&results).Statement + sqlString := statement.SQL.String() + sqlParams := statement.Vars + + //assert + require.NoError(suite.T(), err) + require.Equal(suite.T(), + strings.Join([]string{ + "SELECT ((codeJson.value ->> '$.system') || '|' || (codeJson.value ->> '$.code')) as label, count(*) as value", + "FROM fhir_observation as fhir, json_each(fhir.code) as codeJson", + "WHERE (user_id = ?)", + "GROUP BY ((codeJson.value ->> '$.system') || '|' || (codeJson.value ->> '$.code'))", + "ORDER BY count(*) DESC", + "LIMIT 10", + }, " "), sqlString) + require.Equal(suite.T(), sqlParams, []interface{}{ + "00000000-0000-0000-0000-000000000000", + }) +} diff --git a/backend/pkg/database/gorm_repository_query_test.go b/backend/pkg/database/gorm_repository_query_test.go new file mode 100644 index 00000000..1495606b --- /dev/null +++ b/backend/pkg/database/gorm_repository_query_test.go @@ -0,0 +1,305 @@ +package database + +import ( + "context" + "strings" + "testing" + "time" + + "github.com/fastenhealth/fasten-onprem/backend/pkg" + mock_config "github.com/fastenhealth/fasten-onprem/backend/pkg/config/mock" + "github.com/fastenhealth/fasten-onprem/backend/pkg/event_bus" + "github.com/fastenhealth/fasten-onprem/backend/pkg/models" + "github.com/sirupsen/logrus" + "github.com/stretchr/testify/require" + "gorm.io/gorm" +) + +// mimic tests from https://hl7.org/fhir/r4/search.html#token +func TestProcessSearchParameter(t *testing.T) { + //setup + t.Parallel() + var processSearchParameterTests = []struct { + searchParameterWithModifier string // input + searchParameterLookup map[string]string // input (allowed search parameters) + expected SearchParameter + expectedError bool // expected result + }{ + {"test", map[string]string{"test": "string"}, SearchParameter{Type: "string", Name: "test", Modifier: ""}, false}, + {"test:begin", map[string]string{"test": "string"}, SearchParameter{Type: "string", Name: "test", Modifier: "begin"}, false}, + {"unknown:doesntmatter", map[string]string{"test": "string"}, SearchParameter{}, true}, //unknown search parameter should throw error + {"unknown", map[string]string{"test": "string"}, SearchParameter{}, true}, //unknown search parameter should throw error + {"test", map[string]string{"test": "faketype"}, SearchParameter{Type: "faketype", Name: "test", Modifier: ""}, false}, + {"id", map[string]string{"id": "keyword"}, SearchParameter{Type: "keyword", Name: "id", Modifier: ""}, false}, + + {"given", map[string]string{"given": "string"}, SearchParameter{Type: "string", Name: "given", Modifier: ""}, false}, + {"given:contains", map[string]string{"given": "string"}, SearchParameter{Type: "string", Name: "given", Modifier: "contains"}, false}, + {"given:exact", map[string]string{"given": "string"}, SearchParameter{Type: "string", Name: "given", Modifier: "exact"}, false}, + {"url:below", map[string]string{"url": "string"}, SearchParameter{Type: "string", Name: "url", Modifier: "below"}, false}, + {"url:above", map[string]string{"url": "string"}, SearchParameter{Type: "string", Name: "url", Modifier: "above"}, false}, + + {"display:text", map[string]string{"display": "token"}, SearchParameter{}, true}, + } + + //test && assert + for ndx, tt := range processSearchParameterTests { + actual, actualErr := 
ProcessSearchParameter(tt.searchParameterWithModifier, tt.searchParameterLookup) + if tt.expectedError { + require.Error(t, actualErr, "Expected error but got none for processSearchParameterTests[%d] %s", ndx, tt.searchParameterWithModifier) + } else { + require.NoError(t, actualErr, "Expected no error but got one for processSearchParameterTests[%d] %s", ndx, tt.searchParameterWithModifier) + require.Equal(t, tt.expected, actual) + } + } +} + +// mimic tests from https://hl7.org/fhir/r4/search.html#token +func TestProcessSearchParameterValue(t *testing.T) { + //setup + t.Parallel() + var processSearchParameterValueTests = []struct { + searchParameter SearchParameter // input + searchValueWithPrefix string // input (search value) + expected SearchParameterValue + expectedError bool // expected result + }{ + {SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "gt0.8", SearchParameterValue{Value: 0.8, Prefix: "gt", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "100", SearchParameterValue{Value: float64(100), Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "100.00", SearchParameterValue{Value: float64(100), Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "1e2", SearchParameterValue{Value: float64(100), Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "lt100", SearchParameterValue{Value: float64(100), Prefix: "lt", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "le100", SearchParameterValue{Value: float64(100), Prefix: "le", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "gt100", SearchParameterValue{Value: float64(100), Prefix: "gt", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "ge100", SearchParameterValue{Value: float64(100), Prefix: "ge", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "ne100", SearchParameterValue{Value: float64(100), Prefix: "ne", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "unknown100", SearchParameterValue{}, true}, //unknown prefix, invalid number error + {SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "", SearchParameterValue{}, true}, //empty string, invalid number error + + {SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, "eq2013-01-14", SearchParameterValue{Value: time.Date(2013, time.January, 14, 0, 0, 0, 0, time.UTC), Prefix: "eq", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, "ne2013-01-14", SearchParameterValue{Value: time.Date(2013, time.January, 14, 0, 0, 0, 0, time.UTC), Prefix: "ne", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, "lt2013-01-14T10:00:00Z", SearchParameterValue{Value: time.Date(2013, time.January, 14, 10, 0, 0, 0, time.UTC), Prefix: "lt", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "date", Name: "issueDate", 
Modifier: ""}, "lt2013-01-14T10:00", SearchParameterValue{}, true}, //missing seconds + {SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, "lt2013-01-14T10:00Z", SearchParameterValue{}, true}, //missing timezone + {SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, "unknown2013-01-14T10:00:00Z", SearchParameterValue{}, true}, //unkown prefix, causes invalid date error + {SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, "", SearchParameterValue{}, true}, //empty date, invalid date error + + {SearchParameter{Type: "string", Name: "given", Modifier: ""}, "eve", SearchParameterValue{Value: "eve", Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "string", Name: "given", Modifier: "contains"}, "eve", SearchParameterValue{Value: "eve", Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "string", Name: "given", Modifier: "exact"}, "Eve", SearchParameterValue{Value: "Eve", Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "string", Name: "given", Modifier: ""}, "", SearchParameterValue{}, true}, //empty string, invalid string error + + {SearchParameter{Type: "uri", Name: "url", Modifier: ""}, "http://acme.org/fhir/ValueSet/123", SearchParameterValue{Value: "http://acme.org/fhir/ValueSet/123", Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "uri", Name: "url", Modifier: "below"}, "http://acme.org/fhir/", SearchParameterValue{Value: "http://acme.org/fhir/", Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "uri", Name: "url", Modifier: "above"}, "http://acme.org/fhir/", SearchParameterValue{Value: "http://acme.org/fhir/", Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "uri", Name: "url", Modifier: ""}, "urn:oid:1.2.3.4.5", SearchParameterValue{Value: "urn:oid:1.2.3.4.5", Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "uri", Name: "url", Modifier: ""}, "", SearchParameterValue{}, true}, //emtpy uri, invalid uri error + + {SearchParameter{Type: "token", Name: "identifier", Modifier: ""}, "http://acme.org/patient|2345", SearchParameterValue{Value: "2345", Prefix: "", SecondaryValues: map[string]interface{}{"identifierSystem": "http://acme.org/patient"}}, false}, + {SearchParameter{Type: "token", Name: "gender", Modifier: ""}, "male", SearchParameterValue{Value: "male", Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "token", Name: "gender", Modifier: "not"}, "male", SearchParameterValue{Value: "male", Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "token", Name: "section", Modifier: "not"}, "48765-2", SearchParameterValue{Value: "48765-2", Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "token", Name: "active", Modifier: ""}, "true", SearchParameterValue{Value: "true", Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "token", Name: "code", Modifier: ""}, "http://acme.org/conditions/codes|ha125", SearchParameterValue{Value: "ha125", Prefix: "", SecondaryValues: map[string]interface{}{"codeSystem": "http://acme.org/conditions/codes"}}, false}, + {SearchParameter{Type: "token", Name: "code", Modifier: ""}, "ha125", SearchParameterValue{Value: "ha125", Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + 
{SearchParameter{Type: "token", Name: "identifier", Modifier: "otype"}, "http://terminology.hl7.org/CodeSystem/v2-0203|MR|446053", SearchParameterValue{Value: "MR|446053", Prefix: "", SecondaryValues: map[string]interface{}{"identifierSystem": "http://terminology.hl7.org/CodeSystem/v2-0203"}}, false}, + {SearchParameter{Type: "token", Name: "code", Modifier: ""}, "|", SearchParameterValue{}, true}, //empty value should throw an error + {SearchParameter{Type: "token", Name: "code", Modifier: ""}, "", SearchParameterValue{}, true}, //empty value should throw an error + {SearchParameter{Type: "token", Name: "code", Modifier: ""}, "http://acme.org/conditions/codes|", SearchParameterValue{Value: "", Prefix: "", SecondaryValues: map[string]interface{}{"codeSystem": "http://acme.org/conditions/codes"}}, false}, + {SearchParameter{Type: "token", Name: "code", Modifier: ""}, "|807-1", SearchParameterValue{Value: "807-1", Prefix: "", SecondaryValues: map[string]interface{}{"codeSystem": ""}}, false}, + + {SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "5.4|http://unitsofmeasure.org|mg", SearchParameterValue{Value: float64(5.4), Prefix: "", SecondaryValues: map[string]interface{}{"valueQuantitySystem": "http://unitsofmeasure.org", "valueQuantityCode": "mg"}}, false}, + {SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "5.40e-3|http://unitsofmeasure.org|g", SearchParameterValue{Value: float64(0.0054), Prefix: "", SecondaryValues: map[string]interface{}{"valueQuantitySystem": "http://unitsofmeasure.org", "valueQuantityCode": "g"}}, false}, + {SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "5.4||mg", SearchParameterValue{Value: float64(5.4), Prefix: "", SecondaryValues: map[string]interface{}{"valueQuantityCode": "mg"}}, false}, + {SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "5.4", SearchParameterValue{Value: float64(5.4), Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "le5.4|http://unitsofmeasure.org|mg", SearchParameterValue{Value: float64(5.4), Prefix: "le", SecondaryValues: map[string]interface{}{"valueQuantitySystem": "http://unitsofmeasure.org", "valueQuantityCode": "mg"}}, false}, + {SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "ap5.4|http://unitsofmeasure.org|mg", SearchParameterValue{Value: float64(5.4), Prefix: "ap", SecondaryValues: map[string]interface{}{"valueQuantitySystem": "http://unitsofmeasure.org", "valueQuantityCode": "mg"}}, false}, + {SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "unknown5.4", SearchParameterValue{}, true}, //unknown prefix, causes invalid number + {SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "ap5.4|http://unitsofmeasure.org|mg|additional", SearchParameterValue{Value: float64(5.4), Prefix: "ap", SecondaryValues: map[string]interface{}{"valueQuantitySystem": "http://unitsofmeasure.org", "valueQuantityCode": "mg|additional"}}, false}, + {SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "5.4||", SearchParameterValue{Value: float64(5.4), Prefix: "", SecondaryValues: map[string]interface{}{}}, false}, + {SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, "", SearchParameterValue{}, true}, + + {SearchParameter{Type: "keyword", Name: "id", Modifier: ""}, "1234", SearchParameterValue{Value: "1234", SecondaryValues: map[string]interface{}{}}, false}, + } + + 
//test && assert + for ndx, tt := range processSearchParameterValueTests { + actual, actualErr := ProcessSearchParameterValue(tt.searchParameter, tt.searchValueWithPrefix) + if tt.expectedError { + require.Error(t, actualErr, "Expected error but got none for processSearchParameterValueTests[%d] %s=%s", ndx, tt.searchParameter.Name, tt.searchValueWithPrefix) + } else { + require.NoError(t, actualErr, "Expected no error but got one for processSearchParameterValueTests[%d] %s=%s", ndx, tt.searchParameter.Name, tt.searchValueWithPrefix) + require.Equal(t, tt.expected, actual) + } + } +} + +func TestSearchCodeToWhereClause(t *testing.T) { + //setup + var searchCodeToWhereClauseTests = []struct { + searchParameter SearchParameter + searchValue SearchParameterValue + searchLevelSuffix string + expectedClause string + expectedNamedParams map[string]interface{} + expectedError bool + }{ + {SearchParameter{Type: "number", Name: "probability", Modifier: ""}, SearchParameterValue{Value: float64(100), Prefix: "gt", SecondaryValues: map[string]interface{}{}}, "0_0", "(probability > @probability_0_0)", map[string]interface{}{"probability_0_0": float64(100)}, false}, + {SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, SearchParameterValue{Value: time.Date(2013, time.January, 14, 10, 0, 0, 0, time.UTC), Prefix: "lt", SecondaryValues: map[string]interface{}{}}, "1_1", "(issueDate < @issueDate_1_1)", map[string]interface{}{"issueDate_1_1": time.Date(2013, time.January, 14, 10, 0, 0, 0, time.UTC)}, false}, + + {SearchParameter{Type: "string", Name: "given", Modifier: ""}, SearchParameterValue{Value: "eve", Prefix: "", SecondaryValues: map[string]interface{}{}}, "0_0", "(givenJson.value LIKE @given_0_0)", map[string]interface{}{"given_0_0": "eve%"}, false}, + {SearchParameter{Type: "string", Name: "given", Modifier: "contains"}, SearchParameterValue{Value: "eve", Prefix: "", SecondaryValues: map[string]interface{}{}}, "0_0", "(givenJson.value LIKE @given_0_0)", map[string]interface{}{"given_0_0": "%eve%"}, false}, + {SearchParameter{Type: "string", Name: "given", Modifier: "exact"}, SearchParameterValue{Value: "eve", Prefix: "", SecondaryValues: map[string]interface{}{}}, "0_0", "(givenJson.value = @given_0_0)", map[string]interface{}{"given_0_0": "eve"}, false}, + + {SearchParameter{Type: "uri", Name: "url", Modifier: "below"}, SearchParameterValue{Value: "http://acme.org/fhir/", Prefix: "", SecondaryValues: map[string]interface{}{}}, "0_0", "(url LIKE @url_0_0)", map[string]interface{}{"url_0_0": "http://acme.org/fhir/%"}, false}, + {SearchParameter{Type: "uri", Name: "url", Modifier: "above"}, SearchParameterValue{Value: "http://acme.org/fhir/", Prefix: "", SecondaryValues: map[string]interface{}{}}, "0_0", "", map[string]interface{}{}, true}, //above modifier not supported + + {SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, SearchParameterValue{Value: float64(5.4), Prefix: "", SecondaryValues: map[string]interface{}{"valueQuantityCode": "mg"}}, "0_0", "(valueQuantityJson.value ->> '$.value' = @valueQuantity_0_0 AND valueQuantityJson.value ->> '$.code' = @valueQuantityCode_0_0)", map[string]interface{}{"valueQuantity_0_0": float64(5.4), "valueQuantityCode_0_0": "mg"}, false}, + {SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, SearchParameterValue{Value: float64(5.4), Prefix: "", SecondaryValues: map[string]interface{}{}}, "0_0", "(valueQuantityJson.value ->> '$.value' = @valueQuantity_0_0)", map[string]interface{}{"valueQuantity_0_0": 
float64(5.4)}, false}, + {SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, SearchParameterValue{Value: float64(5.4), Prefix: "le", SecondaryValues: map[string]interface{}{"valueQuantitySystem": "http://unitsofmeasure.org", "valueQuantityCode": "mg"}}, "0_0", "(valueQuantityJson.value ->> '$.value' <= @valueQuantity_0_0 AND valueQuantityJson.value ->> '$.code' = @valueQuantityCode_0_0 AND valueQuantityJson.value ->> '$.system' = @valueQuantitySystem_0_0)", map[string]interface{}{"valueQuantity_0_0": float64(5.4), "valueQuantitySystem_0_0": "http://unitsofmeasure.org", "valueQuantityCode_0_0": "mg"}, false}, + {SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, SearchParameterValue{Value: float64(5.4), Prefix: "ap", SecondaryValues: map[string]interface{}{"valueQuantitySystem": "http://unitsofmeasure.org", "valueQuantityCode": "mg"}}, "0_0", "", map[string]interface{}{}, true}, //ap modifier not supported + {SearchParameter{Type: "quantity", Name: "valueQuantity", Modifier: ""}, SearchParameterValue{Value: float64(5.4), Prefix: "ne", SecondaryValues: map[string]interface{}{"valueQuantitySystem": "http://unitsofmeasure.org", "valueQuantityCode": "mg"}}, "0_0", "(valueQuantityJson.value ->> '$.value' <> @valueQuantity_0_0 AND valueQuantityJson.value ->> '$.code' = @valueQuantityCode_0_0 AND valueQuantityJson.value ->> '$.system' = @valueQuantitySystem_0_0)", map[string]interface{}{"valueQuantity_0_0": float64(5.4), "valueQuantitySystem_0_0": "http://unitsofmeasure.org", "valueQuantityCode_0_0": "mg"}, false}, + + {SearchParameter{Type: "token", Name: "code", Modifier: ""}, SearchParameterValue{Value: "ha125", Prefix: "", SecondaryValues: map[string]interface{}{"codeSystem": "http://acme.org/conditions/codes"}}, "0_0", "(codeJson.value ->> '$.code' = @code_0_0 AND codeJson.value ->> '$.system' = @codeSystem_0_0)", map[string]interface{}{"code_0_0": "ha125", "codeSystem_0_0": "http://acme.org/conditions/codes"}, false}, + {SearchParameter{Type: "token", Name: "code", Modifier: ""}, SearchParameterValue{Value: "ha125", Prefix: "", SecondaryValues: map[string]interface{}{}}, "0_0", "(codeJson.value ->> '$.code' = @code_0_0)", map[string]interface{}{"code_0_0": "ha125"}, false}, + {SearchParameter{Type: "token", Name: "identifier", Modifier: "otype"}, SearchParameterValue{Value: "MR|446053", Prefix: "", SecondaryValues: map[string]interface{}{"identifierSystem": "http://terminology.hl7.org/CodeSystem/v2-0203"}}, "0_0", "(identifierJson.value ->> '$.code' = @identifier_0_0 AND identifierJson.value ->> '$.system' = @identifierSystem_0_0)", map[string]interface{}{"identifier_0_0": "MR|446053", "identifierSystem_0_0": "http://terminology.hl7.org/CodeSystem/v2-0203"}, false}, + + {SearchParameter{Type: "keyword", Name: "id", Modifier: ""}, SearchParameterValue{Value: "1234", Prefix: "", SecondaryValues: map[string]interface{}{}}, "0_0", "(id = @id_0_0)", map[string]interface{}{"id_0_0": "1234"}, false}, + } + + //test && assert + for ndx, tt := range searchCodeToWhereClauseTests { + actualClause, actualNamedParams, actualErr := SearchCodeToWhereClause(tt.searchParameter, tt.searchValue, tt.searchLevelSuffix) + if tt.expectedError { + require.Error(t, actualErr, "Expected error but got none for searchCodeToWhereClauseTests[%d] %s=%s", ndx, tt.searchParameter.Name, tt.searchValue.Value) + } else { + require.NoError(t, actualErr, "Expected no error but got one for searchCodeToWhereClauseTests[%d] %s=%s", ndx, tt.searchParameter.Name, tt.searchValue.Value) + 
require.Equal(t, tt.expectedClause, actualClause) + require.Equal(t, tt.expectedNamedParams, actualNamedParams) + } + } + +} + +// TODO +func TestSearchCodeToFromClause(t *testing.T) { + //setup + var searchCodeToFromClauseTests = []struct { + searchParameter SearchParameter + expectedClause string + expectedError bool + }{ + {SearchParameter{Type: "number", Name: "probability", Modifier: ""}, "", false}, + {SearchParameter{Type: "date", Name: "issueDate", Modifier: ""}, "", false}, + {SearchParameter{Type: "keyword", Name: "id", Modifier: ""}, "", false}, + {SearchParameter{Type: "token", Name: "hello", Modifier: ""}, "json_each(fhir.hello) as helloJson", false}, + } + + //test && assert + for ndx, tt := range searchCodeToFromClauseTests { + actualClause, actualErr := SearchCodeToFromClause(tt.searchParameter) + if tt.expectedError { + require.Error(t, actualErr, "Expected error but got none for searchCodeToFromClauseTests[%d] %s", ndx, tt.searchParameter.Name) + } else { + require.NoError(t, actualErr, "Expected no error but got one for searchCodeToFromClauseTests[%d] %s", ndx, tt.searchParameter.Name) + require.Equal(t, tt.expectedClause, actualClause) + } + } + +} + +//Aggregation tests + +// mimic tests from https://hl7.org/fhir/r4/search.html#token +func TestProcessAggregationParameter(t *testing.T) { + //setup + t.Parallel() + var processSearchParameterTests = []struct { + aggregationFieldWithFn models.QueryResourceAggregation // input + searchParameterLookup map[string]string // input (allowed search parameters) + expected AggregationParameter + expectedError bool // expected result + }{ + //primitive types + {models.QueryResourceAggregation{Field: "test"}, map[string]string{"test": "keyword"}, AggregationParameter{SearchParameter: SearchParameter{Type: "keyword", Name: "test", Modifier: ""}}, false}, + {models.QueryResourceAggregation{Field: "test"}, map[string]string{"test": "number"}, AggregationParameter{SearchParameter: SearchParameter{Type: "number", Name: "test", Modifier: ""}}, false}, + {models.QueryResourceAggregation{Field: "test"}, map[string]string{"test": "uri"}, AggregationParameter{SearchParameter: SearchParameter{Type: "uri", Name: "test", Modifier: ""}}, false}, + {models.QueryResourceAggregation{Field: "test"}, map[string]string{"test": "date"}, AggregationParameter{SearchParameter: SearchParameter{Type: "date", Name: "test", Modifier: ""}}, false}, + + {models.QueryResourceAggregation{Field: "test:hello"}, map[string]string{"test": "keyword"}, AggregationParameter{SearchParameter: SearchParameter{Type: "date", Name: "test", Modifier: ""}}, true}, //cannot have a modifier + {models.QueryResourceAggregation{Field: "test:hello"}, map[string]string{"test": "number"}, AggregationParameter{SearchParameter: SearchParameter{Type: "date", Name: "test", Modifier: ""}}, true}, //cannot have a modifier + {models.QueryResourceAggregation{Field: "test:hello"}, map[string]string{"test": "uri"}, AggregationParameter{SearchParameter: SearchParameter{Type: "date", Name: "test", Modifier: ""}}, true}, //cannot have a modifier + {models.QueryResourceAggregation{Field: "test:hello"}, map[string]string{"test": "date"}, AggregationParameter{SearchParameter: SearchParameter{Type: "date", Name: "test", Modifier: ""}}, true}, //cannot have a modifier + + //complex types + {models.QueryResourceAggregation{Field: "test"}, map[string]string{"test": "reference"}, AggregationParameter{SearchParameter: SearchParameter{Type: "reference", Name: "test", Modifier: ""}}, true}, //complex types 
should throw an error when missing modifier + {models.QueryResourceAggregation{Field: "test"}, map[string]string{"test": "string"}, AggregationParameter{SearchParameter: SearchParameter{Type: "string", Name: "test", Modifier: ""}}, true}, //complex types should throw an error when missing modifier + {models.QueryResourceAggregation{Field: "test"}, map[string]string{"test": "quantity"}, AggregationParameter{SearchParameter: SearchParameter{Type: "quantity", Name: "test", Modifier: ""}}, true}, //complex types should throw an error when missing modifier + + {models.QueryResourceAggregation{Field: "test:hello"}, map[string]string{"test": "reference"}, AggregationParameter{SearchParameter: SearchParameter{Type: "reference", Name: "test", Modifier: "hello"}}, false}, + {models.QueryResourceAggregation{Field: "test:hello"}, map[string]string{"test": "string"}, AggregationParameter{SearchParameter: SearchParameter{Type: "string", Name: "test", Modifier: "hello"}}, false}, + {models.QueryResourceAggregation{Field: "test:hello"}, map[string]string{"test": "quantity"}, AggregationParameter{SearchParameter: SearchParameter{Type: "quantity", Name: "test", Modifier: "hello"}}, false}, + + //token type + {models.QueryResourceAggregation{Field: "code"}, map[string]string{"code": "token"}, AggregationParameter{SearchParameter: SearchParameter{Type: "token", Name: "code", Modifier: ""}}, false}, + {models.QueryResourceAggregation{Field: "code:code"}, map[string]string{"code": "token"}, AggregationParameter{SearchParameter: SearchParameter{Type: "token", Name: "code", Modifier: "code"}}, false}, + } + + //test && assert + for ndx, tt := range processSearchParameterTests { + actual, actualErr := ProcessAggregationParameter(tt.aggregationFieldWithFn, tt.searchParameterLookup) + if tt.expectedError { + require.Error(t, actualErr, "Expected error but got none for processAggregationParameterTests[%d] %s", ndx, tt.aggregationFieldWithFn) + } else { + require.NoError(t, actualErr, "Expected no error but got one for processAggregationParameterTests[%d] %s", ndx, tt.aggregationFieldWithFn) + require.Equal(t, tt.expected, actual) + } + } +} + +func (suite *RepositoryTestSuite) TestQueryResources_SQL() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + + sqliteRepo := dbRepo.(*GormRepository) + sqliteRepo.GormClient = sqliteRepo.GormClient.Session(&gorm.Session{DryRun: true}) + + //test + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + sqlQuery, err := sqliteRepo.sqlQueryResources(authContext, models.QueryResource{ + Select: []string{}, + Where: map[string]interface{}{ + "code": "test_code", + }, + From: "Observation", + }) + require.NoError(suite.T(), err) + var results []map[string]interface{} + statement := sqlQuery.Find(&results).Statement + sqlString := statement.SQL.String() + sqlParams := statement.Vars + + //assert + 
require.NoError(suite.T(), err) + require.Equal(suite.T(), sqlString, + strings.Join([]string{ + "SELECT fhir.*", + "FROM fhir_observation as fhir, json_each(fhir.code) as codeJson", + "WHERE ((codeJson.value ->> '$.code' = ?)) AND (user_id = ?) GROUP BY `fhir`.`id`", + "ORDER BY fhir.sort_date DESC"}, " ")) + require.Equal(suite.T(), sqlParams, []interface{}{ + "test_code", "00000000-0000-0000-0000-000000000000", + }) +} diff --git a/backend/pkg/database/gorm_repository_settings.go b/backend/pkg/database/gorm_repository_settings.go new file mode 100644 index 00000000..e9f3eb8a --- /dev/null +++ b/backend/pkg/database/gorm_repository_settings.go @@ -0,0 +1,98 @@ +package database + +import ( + "context" + "fmt" + + "github.com/fastenhealth/fasten-onprem/backend/pkg/models" + "github.com/google/uuid" +) + +// LoadUserSettings will retrieve the current user's settings from the database and return a UserSettings struct +func (gr *GormRepository) LoadUserSettings(ctx context.Context) (*models.UserSettings, error) { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return nil, currentUserErr + } + + settingsEntries := []models.UserSettingEntry{} + if err := gr.GormClient. + WithContext(ctx). + Where(models.UserSettingEntry{ + UserID: currentUser.ID, + }). + Find(&settingsEntries).Error; err != nil { + return nil, fmt.Errorf("Could not get settings from DB: %v", err) + } + + settings := models.UserSettings{} + for _, settingsEntry := range settingsEntries { + err := settings.FromUserSettingsEntry(&settingsEntry) + if err != nil { + return nil, fmt.Errorf("Could not get settings from DB: %v", err) + } + } + + return &settings, nil +} + +// testing +// curl -d '{"metrics": { "notify_level": 5, "status_filter_attributes": 5, "status_threshold": 5 }}' -H "Content-Type: application/json" -X POST http://localhost:9090/api/settings +// SaveUserSettings will merge the provided settings with the user's existing setting entries, then save them to the database. +func (gr *GormRepository) SaveUserSettings(ctx context.Context, newSettings *models.UserSettings) error { + currentUser, currentUserErr := gr.GetCurrentUser(ctx) + if currentUserErr != nil { + return currentUserErr + } + + //retrieve current settings from the database + currentSettingsEntries := []models.UserSettingEntry{} + + if err := gr.GormClient. + WithContext(ctx). + Where(models.UserSettingEntry{ + UserID: currentUser.ID, + }). + Find(&currentSettingsEntries).Error; err != nil { + return fmt.Errorf("Could not get settings from DB: %v", err) + } + + //update settingsEntries + + newSettingsEntries, err := newSettings.ToUserSettingsEntry(currentSettingsEntries) + if err != nil { + return fmt.Errorf("merge new settings with DB: %v", err) + } + + for ndx, settingsEntry := range newSettingsEntries { + + // store in database. + //TODO: this should be `gr.gormClient.Updates(&settingsEntries).Error` + err := gr.GormClient. + WithContext(ctx). + Model(&models.UserSettingEntry{}). + Where([]uuid.UUID{settingsEntry.ID}). + Select("setting_value_numeric", "setting_value_string", "setting_value_bool", "setting_value_array"). 
+ Updates(newSettingsEntries[ndx]).Error + if err != nil { + return err + } + } + return nil +} + +func (gr *GormRepository) PopulateDefaultUserSettings(ctx context.Context, userId uuid.UUID) error { + + //retrieve current settings from the database + settingsEntries := []models.UserSettingEntry{} + settingsEntries = append(settingsEntries, models.UserSettingEntry{ + UserID: userId, + SettingKeyName: "dashboard_locations", + SettingKeyDescription: "remote dashboard locations (github gists)", + SettingDataType: "array", + SettingValueArray: []string{}, + }) + + return gr.GormClient.WithContext(ctx).Create(settingsEntries).Error + +} diff --git a/backend/pkg/database/gorm_repository_settings_test.go b/backend/pkg/database/gorm_repository_settings_test.go new file mode 100644 index 00000000..54df1625 --- /dev/null +++ b/backend/pkg/database/gorm_repository_settings_test.go @@ -0,0 +1,104 @@ +package database + +import ( + "context" + "fmt" + "github.com/fastenhealth/fasten-onprem/backend/pkg" + "github.com/fastenhealth/fasten-onprem/backend/pkg/config" + "github.com/fastenhealth/fasten-onprem/backend/pkg/event_bus" + "github.com/fastenhealth/fasten-onprem/backend/pkg/models" + "github.com/sirupsen/logrus" + "github.com/stretchr/testify/require" + "github.com/stretchr/testify/suite" + "io/ioutil" + "log" + "os" + "testing" +) + +// Define the suite, and absorb the built-in basic suite +// functionality from testify - including a T() method which +// returns the current testing context +type RepositorySettingsTestSuite struct { + suite.Suite + TestDatabase *os.File + TestConfig config.Interface + + TestRepository DatabaseRepository + TestUser *models.User +} + +// BeforeTest has a function to be executed right before the test starts and receives the suite and test names as input +func (suite *RepositorySettingsTestSuite) BeforeTest(suiteName, testName string) { + + dbFile, err := ioutil.TempFile("", fmt.Sprintf("%s.*.db", testName)) + if err != nil { + log.Fatal(err) + } + suite.TestDatabase = dbFile + + testConfig, err := config.Create() + require.NoError(suite.T(), err) + testConfig.SetDefault("database.location", suite.TestDatabase.Name()) + testConfig.SetDefault("log.level", "INFO") + suite.TestConfig = testConfig + + dbRepo, err := NewRepository(testConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + suite.TestRepository = dbRepo + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = suite.TestRepository.CreateUser(context.Background(), userModel) + suite.TestUser = userModel + require.NoError(suite.T(), err) + +} + +// AfterTest has a function to be executed right after the test finishes and receives the suite and test names as input +func (suite *RepositorySettingsTestSuite) AfterTest(suiteName, testName string) { + os.Remove(suite.TestDatabase.Name()) +} + +// In order for 'go test' to run this suite, we need to create +// a normal test function and pass our suite to suite.Run +func TestRepositorySettingsTestSuite(t *testing.T) { + suite.Run(t, new(RepositorySettingsTestSuite)) + +} + +func (suite *RepositorySettingsTestSuite) TestLoadUserSettings() { + //setup + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + //test + userSettings, err := suite.TestRepository.LoadUserSettings(authContext) + require.NoError(suite.T(), err) + + //assert + require.Equal(suite.T(), userSettings, 
&models.UserSettings{ + DashboardLocations: []string{}, + }) +} + +func (suite *RepositorySettingsTestSuite) TestSaveUserSettings() { + //setup + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + //test + err := suite.TestRepository.SaveUserSettings(authContext, &models.UserSettings{ + DashboardLocations: []string{"https://gist.github.com/AnalogJ/a56ded05cc6766b377268f14719cb84d"}, + }) + require.NoError(suite.T(), err) + userSettings, err := suite.TestRepository.LoadUserSettings(authContext) + require.NoError(suite.T(), err) + + //assert + require.Equal(suite.T(), userSettings, &models.UserSettings{ + DashboardLocations: []string{ + "https://gist.github.com/AnalogJ/a56ded05cc6766b377268f14719cb84d", + }, + }) +} diff --git a/backend/pkg/database/gorm_repository_test.go b/backend/pkg/database/gorm_repository_test.go new file mode 100644 index 00000000..c67ffeff --- /dev/null +++ b/backend/pkg/database/gorm_repository_test.go @@ -0,0 +1,1409 @@ +package database + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "log" + "net/http/httptest" + "os" + "testing" + "time" + + "github.com/fastenhealth/fasten-onprem/backend/pkg" + mock_config "github.com/fastenhealth/fasten-onprem/backend/pkg/config/mock" + "github.com/fastenhealth/fasten-onprem/backend/pkg/event_bus" + "github.com/fastenhealth/fasten-onprem/backend/pkg/models" + sourceModels "github.com/fastenhealth/fasten-sources/clients/models" + sourcePkg "github.com/fastenhealth/fasten-sources/pkg" + "github.com/fastenhealth/gofhir-models/fhir401" + fhirutils "github.com/fastenhealth/gofhir-models/fhir401/utils" + "github.com/gin-gonic/gin" + "github.com/golang/mock/gomock" + "github.com/google/uuid" + "github.com/sirupsen/logrus" + "github.com/stretchr/testify/require" + "github.com/stretchr/testify/suite" + "golang.org/x/net/context" +) + +func TestSourceCredentialInterface(t *testing.T) { + t.Parallel() + + repo := new(GormRepository) + + //assert + require.Implements(t, (*sourceModels.DatabaseRepository)(nil), repo, "should implement the DatabaseRepository interface from fasten-sources") + require.Implements(t, (*DatabaseRepository)(nil), repo, "should implement the DatabaseRepository interface") +} + +// Define the suite, and absorb the built-in basic suite +// functionality from testify - including a T() method which +// returns the current testing context +type RepositoryTestSuite struct { + suite.Suite + MockCtrl *gomock.Controller + TestDatabase *os.File +} + +// BeforeTest has a function to be executed right before the test starts and receives the suite and test names as input +func (suite *RepositoryTestSuite) BeforeTest(suiteName, testName string) { + suite.MockCtrl = gomock.NewController(suite.T()) + + dbFile, err := ioutil.TempFile("", fmt.Sprintf("%s.*.db", testName)) + if err != nil { + log.Fatal(err) + } + suite.TestDatabase = dbFile + +} + +// AfterTest has a function to be executed right after the test finishes and receives the suite and test names as input +func (suite *RepositoryTestSuite) AfterTest(suiteName, testName string) { + suite.MockCtrl.Finish() + os.Remove(suite.TestDatabase.Name()) +} + +// In order for 'go test' to run this suite, we need to create +// a normal test function and pass our suite to suite.Run +func TestRepositoryTestSuite(t *testing.T) { + suite.Run(t, new(RepositoryTestSuite)) + +} + +func (suite *RepositoryTestSuite) TestNewRepository() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + 
fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + + //test + _, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + + //assert + require.NoError(suite.T(), err) +} + +func (suite *RepositoryTestSuite) TestCreateUser() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + //test + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + dbRepo.CreateUser(context.Background(), userModel) + + //assert + require.NotEmpty(suite.T(), userModel.ID) +} + +func (suite *RepositoryTestSuite) TestCreateUser_WithExitingUser_ShouldFail() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + //test + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + + userModel2 := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel2) + //assert + require.Error(suite.T(), err) +} + +// TODO: ensure user's cannot specify the ID when creating a user. 
+func (suite *RepositoryTestSuite) TestCreateUser_WithUserProvidedId_ShouldBeReplaced() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + //test + userProvidedId := uuid.New() + userModel := &models.User{ + ModelBase: models.ModelBase{ + ID: userProvidedId, + }, + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + dbRepo.CreateUser(context.Background(), userModel) + + //assert + require.NotEmpty(suite.T(), userModel.ID) + require.NotEqual(suite.T(), userProvidedId.String(), userModel.ID.String()) +} + +func (suite *RepositoryTestSuite) TestGetUserByUsername() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + + //test + userModelResult, err := dbRepo.GetUserByUsername(context.Background(), "test_username") + + //assert + require.NoError(suite.T(), err) + require.Equal(suite.T(), userModel.ID, userModelResult.ID) +} + +func (suite *RepositoryTestSuite) TestGetUserByUsername_WithInvalidUsername() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + + //test + _, err = dbRepo.GetUserByUsername(context.Background(), "userdoesntexist") + + //assert + require.Error(suite.T(), err) +} + +func (suite *RepositoryTestSuite) TestGetCurrentUser_WithContextBackgroundAuthUser() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), 
userModel) + require.NoError(suite.T(), err) + + //test + userModelResult, err := dbRepo.GetCurrentUser(context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")) + + //assert + require.NoError(suite.T(), err) + require.NotNil(suite.T(), userModelResult) + require.Equal(suite.T(), userModelResult.Username, "test_username") +} + +func (suite *RepositoryTestSuite) TestGetCurrentUser_WithGinContextBackgroundAuthUser() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + + //test + //ginContext := gin.Context{} + w := httptest.NewRecorder() + ginContext, _ := gin.CreateTestContext(w) + ginContext.Set(pkg.ContextKeyTypeAuthUsername, "test_username") + userModelResult, err := dbRepo.GetCurrentUser(ginContext) + + //assert + require.NoError(suite.T(), err) + require.NotNil(suite.T(), userModelResult) + require.Equal(suite.T(), userModelResult.Username, "test_username") +} + +func (suite *RepositoryTestSuite) TestGetCurrentUser_WithContextBackgroundAuthUserAndNoUserExists_ShouldThrowError() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + //test + userModelResult, err := dbRepo.GetCurrentUser(context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")) + + //assert + require.Error(suite.T(), err) + require.Nil(suite.T(), userModelResult) +} + +func (suite *RepositoryTestSuite) TestCreateGlossaryEntry() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + //test + glossaryEntry := &models.Glossary{ + Code: "49727002", + CodeSystem: "2.16.840.1.113883.6.96", + Publisher: "U.S. National Library of Medicine", + Title: "Cough", + Url: "https://medlineplus.gov/cough.html?utm_source=mplusconnect&utm_medium=service", + Description: `

Coughing is a reflex that keeps your throat and airways clear. Although it can be annoying, coughing helps your body heal or protect itself. Coughs can be either acute or chronic. Acute coughs begin suddenly and usually last no more than 2 to 3 weeks. Acute coughs are the kind you most often get with a cold, flu, or acute bronchitis. Chronic coughs last longer than 2 to 3 weeks. Causes of chronic cough include:

+ +

Water can help ease your cough - whether you drink it or add it to the air with a steamy shower or vaporizer. If you have a cold or the flu, antihistamines may work better than non-prescription cough medicines. Children under four should not have cough medicine. For children over four, use caution and read labels carefully.

`, + } + err = dbRepo.CreateGlossaryEntry(context.Background(), glossaryEntry) + + //assert + require.NoError(suite.T(), err) + require.NotEmpty(suite.T(), glossaryEntry.ID) +} + +func (suite *RepositoryTestSuite) TestUpsertRawResource() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + testSourceCredential := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.New(), + }, + UserID: userModel.ID, + } + testPatientData, err := os.ReadFile("./testdata/Abraham100_Heller342_Patient.json") + require.NoError(suite.T(), err) + + //test + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + wasCreated, err := dbRepo.UpsertRawResource( + authContext, + &testSourceCredential, + sourceModels.RawResourceFhir{ + SourceResourceType: "Patient", + SourceResourceID: "b426b062-8273-4b93-a907-de3176c0567d", + ResourceRaw: testPatientData, + }, + ) + require.NoError(suite.T(), err) + foundPatientResource, err := dbRepo.GetResourceByResourceTypeAndId(authContext, "Patient", "b426b062-8273-4b93-a907-de3176c0567d") + + //assert + require.NoError(suite.T(), err) + require.True(suite.T(), wasCreated) + require.NotNil(suite.T(), foundPatientResource) + require.Equal(suite.T(), foundPatientResource.SourceID, testSourceCredential.ID) + + //ensure that the raw resource data is the same (we don't want to modify the data) + var expectedPatientData map[string]interface{} + err = json.Unmarshal(testPatientData, &expectedPatientData) + require.NoError(suite.T(), err) + + var actualPatientData map[string]interface{} + err = json.Unmarshal(foundPatientResource.ResourceRaw, &actualPatientData) + require.NoError(suite.T(), err) + require.Equal(suite.T(), expectedPatientData, actualPatientData) +} + +//TODO create UPSERT test, where the resource already exists and we need to update it + +func (suite *RepositoryTestSuite) TestUpsertRawResource_WithRelatedResourceAndDuplicateReference() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + testSourceCredential := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.New(), + }, + UserID: userModel.ID, + } + patientData, err := 
os.ReadFile("./testdata/Abraham100_Heller342_Patient.json") + require.NoError(suite.T(), err) + + //test + wasCreated, err := dbRepo.UpsertRawResource( + context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username"), + &testSourceCredential, + sourceModels.RawResourceFhir{ + SourceResourceType: "Patient", + SourceResourceID: "b426b062-8273-4b93-a907-de3176c0567d", + ResourceRaw: patientData, + ReferencedResources: []string{"Observation/1", "Observation/2", "Observation/3", "Observation/3"}, //duplicate resource reference should not cause an issue, it should be silently ignored + }, + ) + + //assert + require.NoError(suite.T(), err) + require.True(suite.T(), wasCreated) + relatedResource, err := dbRepo.FindResourceAssociationsByTypeAndId(authContext, &testSourceCredential, "Patient", "b426b062-8273-4b93-a907-de3176c0567d") + require.NoError(suite.T(), err) + require.Equal(suite.T(), 3, len(relatedResource)) + +} + +func (suite *RepositoryTestSuite) TestListResources() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + otherUserModel := &models.User{ + Username: "test_other_username", + Password: "testpassword", + Email: "testother@test.com", + } + err = dbRepo.CreateUser(context.Background(), otherUserModel) + require.NoError(suite.T(), err) + + testSource1Credential := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.New(), + }, + UserID: userModel.ID, + } + testSource2Credential := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.New(), + }, + UserID: userModel.ID, + } + patientDataAbraham100, err := os.ReadFile("./testdata/Abraham100_Heller342_Patient.json") + require.NoError(suite.T(), err) + + testResource1Created, err := dbRepo.UpsertRawResource( + context.WithValue(authContext, pkg.ContextKeyTypeAuthUsername, "test_username"), + &testSource1Credential, + sourceModels.RawResourceFhir{ + SourceResourceType: "Patient", + SourceResourceID: "b426b062-8273-4b93-a907-de3176c0567d", + ResourceRaw: patientDataAbraham100, + ReferencedResources: []string{"Observation/1", "Observation/2", "Observation/3", "Observation/3"}, //duplicate resource reference should not cause an issue, it should be silently ignored + }, + ) + require.NoError(suite.T(), err) + require.True(suite.T(), testResource1Created) + + patientDataLillia547, err := os.ReadFile("./testdata/Lillia547_Schneider99_Patient.json") + require.NoError(suite.T(), err) + testResource2Created, err := dbRepo.UpsertRawResource( + context.WithValue(authContext, pkg.ContextKeyTypeAuthUsername, "test_username"), + &testSource2Credential, + sourceModels.RawResourceFhir{ + SourceResourceType: "Patient", + SourceResourceID: "d3fbfb3a-7b8d-45c0-13b4-9666e4d36a3e", + ResourceRaw: patientDataLillia547, + ReferencedResources: []string{"Observation/10", 
"Observation/20", "Observation/30"}, + }, + ) + require.NoError(suite.T(), err) + require.True(suite.T(), testResource2Created) + + //test + foundPatientResources, err := dbRepo.ListResources(authContext, models.ListResourceQueryOptions{ + SourceResourceType: "Patient", + }) + require.NoError(suite.T(), err) + + findAllResources, err := dbRepo.ListResources(authContext, models.ListResourceQueryOptions{}) + require.NoError(suite.T(), err) + + findSourceResources, err := dbRepo.ListResources(authContext, models.ListResourceQueryOptions{SourceID: testSource1Credential.ID.String()}) + require.NoError(suite.T(), err) + + //find specific resource + findSpecificResource, err := dbRepo.ListResources(authContext, models.ListResourceQueryOptions{SourceResourceID: "d3fbfb3a-7b8d-45c0-13b4-9666e4d36a3e", SourceResourceType: "Patient"}) + require.NoError(suite.T(), err) + + findInvalidResource, err := dbRepo.ListResources(authContext, models.ListResourceQueryOptions{SourceResourceID: "11111111-7b8d-45c0-13b4-9666e4d36a3e", SourceResourceType: "Patient"}) + require.NoError(suite.T(), err) + + findResourceWithOtherUserId, err := dbRepo.ListResources(context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_other_username"), models.ListResourceQueryOptions{SourceResourceID: "d3fbfb3a-7b8d-45c0-13b4-9666e4d36a3e", SourceResourceType: "Patient"}) + require.NoError(suite.T(), err) + + _, err = dbRepo.ListResources(context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "doesnt_exist"), models.ListResourceQueryOptions{SourceResourceID: "d3fbfb3a-7b8d-45c0-13b4-9666e4d36a3e", SourceResourceType: "Patient"}) + require.Error(suite.T(), err) + + //assert + require.Equal(suite.T(), len(foundPatientResources), 2) + require.Equal(suite.T(), len(findAllResources), 2) + require.Equal(suite.T(), len(findSourceResources), 1) + require.Equal(suite.T(), len(findSpecificResource), 1) + require.Equal(suite.T(), len(findInvalidResource), 0) + require.Equal(suite.T(), len(findResourceWithOtherUserId), 0) +} + +func (suite *RepositoryTestSuite) TestGetResourceByResourceTypeAndId() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + testSourceCredential := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.New(), + }, + UserID: userModel.ID, + } + testPatientData, err := os.ReadFile("./testdata/Abraham100_Heller342_Patient.json") + require.NoError(suite.T(), err) + + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + wasCreated, err := dbRepo.UpsertRawResource( + authContext, + &testSourceCredential, + sourceModels.RawResourceFhir{ + SourceResourceType: "Patient", + SourceResourceID: "b426b062-8273-4b93-a907-de3176c0567d", + ResourceRaw: testPatientData, + }, + ) + require.NoError(suite.T(), err) + require.True(suite.T(), wasCreated) + + //test & assert + findPatientResource, err := 
dbRepo.GetResourceByResourceTypeAndId(authContext, "Patient", "b426b062-8273-4b93-a907-de3176c0567d")
+ require.NoError(suite.T(), err)
+ require.NotNil(suite.T(), findPatientResource)
+
+ //raise an error if the resource is not found (invalid resource type or id missing)
+ _, err = dbRepo.GetResourceByResourceTypeAndId(authContext, "Patient", "11111111-8273-4b93-a907-de3176c0567d")
+ require.Error(suite.T(), err)
+
+ _, err = dbRepo.GetResourceByResourceTypeAndId(authContext, "Observation", "b426b062-8273-4b93-a907-de3176c0567d")
+ require.Error(suite.T(), err)
+
+ _, err = dbRepo.GetResourceByResourceTypeAndId(authContext, "InvalidResource", "b426b062-8273-4b93-a907-de3176c0567d")
+ require.Error(suite.T(), err)
+}
+
+func (suite *RepositoryTestSuite) TestGetResourceBySourceId() {
+ //setup
+ fakeConfig := mock_config.NewMockInterface(suite.MockCtrl)
+ fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes()
+ fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes()
+ fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes()
+ dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer())
+ require.NoError(suite.T(), err)
+
+ userModel := &models.User{
+ Username: "test_username",
+ Password: "testpassword",
+ Email: "test@test.com",
+ }
+ err = dbRepo.CreateUser(context.Background(), userModel)
+ require.NoError(suite.T(), err)
+ testSourceCredential := models.SourceCredential{
+ ModelBase: models.ModelBase{
+ ID: uuid.New(),
+ },
+ UserID: userModel.ID,
+ }
+ testPatientData, err := os.ReadFile("./testdata/Abraham100_Heller342_Patient.json")
+ require.NoError(suite.T(), err)
+
+ authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username")
+ wasCreated, err := dbRepo.UpsertRawResource(
+ authContext,
+ &testSourceCredential,
+ sourceModels.RawResourceFhir{
+ SourceResourceType: "Patient",
+ SourceResourceID: "b426b062-8273-4b93-a907-de3176c0567d",
+ ResourceRaw: testPatientData,
+ },
+ )
+ require.NoError(suite.T(), err)
+ require.True(suite.T(), wasCreated)
+
+ //test & assert
+ findPatientResource, err := dbRepo.GetResourceBySourceId(authContext, testSourceCredential.ID.String(), "b426b062-8273-4b93-a907-de3176c0567d")
+ require.NoError(suite.T(), err)
+ require.NotNil(suite.T(), findPatientResource)
+
+ //raise an error if the resource is not found (invalid resource id for source)
+ _, err = dbRepo.GetResourceBySourceId(authContext, testSourceCredential.ID.String(), "11111111-8273-4b93-a907-de3176c0567d")
+ require.Error(suite.T(), err)
+
+ _, err = dbRepo.GetResourceBySourceId(authContext, uuid.NewString(), "b426b062-8273-4b93-a907-de3176c0567d")
+ require.Error(suite.T(), err)
+
+ _, err = dbRepo.GetResourceBySourceId(authContext, testSourceCredential.ID.String(), "")
+ require.Error(suite.T(), err)
+}
+
+func (suite *RepositoryTestSuite) TestGetPatientForSources() {
+ //setup
+ fakeConfig := mock_config.NewMockInterface(suite.MockCtrl)
+ fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes()
+ fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes()
+ fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes()
+ dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer())
+ require.NoError(suite.T(), err)
+
+ userModel := &models.User{
+ Username: "test_username",
+ Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + testSourceCredential := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.New(), + }, + UserID: userModel.ID, + } + testPatientData, err := os.ReadFile("./testdata/Abraham100_Heller342_Patient.json") + require.NoError(suite.T(), err) + + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + wasCreated, err := dbRepo.UpsertRawResource( + authContext, + &testSourceCredential, + sourceModels.RawResourceFhir{ + SourceResourceType: "Patient", + SourceResourceID: "b426b062-8273-4b93-a907-de3176c0567d", + ResourceRaw: testPatientData, + }, + ) + require.NoError(suite.T(), err) + require.True(suite.T(), wasCreated) + + was2Created, err := dbRepo.UpsertRawResource( + authContext, + &testSourceCredential, + sourceModels.RawResourceFhir{ + SourceResourceType: "Patient", + SourceResourceID: "11111111-8273-4b93-a907-de3176c0567d", + ResourceRaw: testPatientData, + }, + ) + require.NoError(suite.T(), err) + require.True(suite.T(), was2Created) + //test & assert + findPatients, err := dbRepo.GetPatientForSources(authContext) + require.NoError(suite.T(), err) + require.NotNil(suite.T(), findPatients) + require.Len(suite.T(), findPatients, 2) //TODO: this may need to change to 1 if we group by source_id + +} + +func (suite *RepositoryTestSuite) TestAddResourceAssociation() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + testSourceCredential := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.New(), + }, + UserID: userModel.ID, + } + + //test + err = dbRepo.AddResourceAssociation(authContext, + &testSourceCredential, "Patient", "b426b062-8273-4b93-a907-de3176c0567d", + &testSourceCredential, "Observation", "11111111-8273-4b93-a907-de3176c0567d") + require.NoError(suite.T(), err) + + //assert + related, err := dbRepo.FindResourceAssociationsByTypeAndId(authContext, &testSourceCredential, "Patient", "b426b062-8273-4b93-a907-de3176c0567d") + require.NoError(suite.T(), err) + require.Equal(suite.T(), 1, len(related)) +} + +func (suite *RepositoryTestSuite) TestAddResourceAssociation_WithMismatchingSourceIds() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + userModel := &models.User{ + Username: "test_username", + 
Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + //test 1 - user id does not match the user id on resources (but they match eachother) + differentUserIdSourceCredential := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.New(), + }, + UserID: uuid.New(), + } + errForUserMismatch := dbRepo.AddResourceAssociation(authContext, + &differentUserIdSourceCredential, "Patient", "b426b062-8273-4b93-a907-de3176c0567d", + &differentUserIdSourceCredential, "Observation", "11111111-8273-4b93-a907-de3176c0567d") + require.Error(suite.T(), errForUserMismatch) + + //test 2 - user id for resources do not match eachother + sourceCredential1 := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.New(), + }, + UserID: uuid.New(), + } + sourceCredential2 := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.New(), + }, + UserID: uuid.New(), + } + require.NotEqual(suite.T(), sourceCredential1.UserID, sourceCredential2.UserID) + errForResourceMismatch := dbRepo.AddResourceAssociation(authContext, + &sourceCredential1, "Patient", "b426b062-8273-4b93-a907-de3176c0567d", + &sourceCredential2, "Observation", "11111111-8273-4b93-a907-de3176c0567d") + require.Error(suite.T(), errForResourceMismatch) +} + +func (suite *RepositoryTestSuite) TestRemoveResourceAssociation() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + testSourceCredential := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.New(), + }, + UserID: userModel.ID, + } + + err = dbRepo.AddResourceAssociation(authContext, + &testSourceCredential, "Patient", "b426b062-8273-4b93-a907-de3176c0567d", + &testSourceCredential, "Observation", "11111111-8273-4b93-a907-de3176c0567d") + require.NoError(suite.T(), err) + + //test + errWhenNotExists := dbRepo.RemoveResourceAssociation(authContext, + &testSourceCredential, "Patient", "999999999-8273-4b93-a907-de3176c0567d", + &testSourceCredential, "Observation", "11111111-8273-4b93-a907-de3176c0567d") + require.Errorf(suite.T(), errWhenNotExists, "association should not exist, so deletion should fail") + + err = dbRepo.RemoveResourceAssociation(authContext, + &testSourceCredential, "Patient", "b426b062-8273-4b93-a907-de3176c0567d", + &testSourceCredential, "Observation", "11111111-8273-4b93-a907-de3176c0567d") + require.NoError(suite.T(), err) +} + +func (suite *RepositoryTestSuite) TestGetSourceSummary() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + 
fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + testSourceCredential := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.New(), + }, + UserID: userModel.ID, + } + err = dbRepo.CreateSource(authContext, &testSourceCredential) + require.NoError(suite.T(), err) + + testPatientData, err := os.ReadFile("./testdata/Abraham100_Heller342_262b819a-5193-404a-9787-b7f599358035.json") + require.NoError(suite.T(), err) + + var testPatientBundle fhir401.Bundle + err = json.Unmarshal(testPatientData, &testPatientBundle) + require.NoError(suite.T(), err) + + for _, resourceEntry := range testPatientBundle.Entry { + + fhirResource, _ := fhirutils.MapToResource(resourceEntry.Resource, false) + resourceType, resourceId := fhirResource.(sourceModels.ResourceInterface).ResourceRef() + if resourceId == nil { + suite.T().Logf("skipping resource with no ID: %s", resourceType) + continue //skip resources missing an ID + } + wasCreated, err := dbRepo.UpsertRawResource( + authContext, + &testSourceCredential, + sourceModels.RawResourceFhir{ + SourceResourceType: resourceType, + SourceResourceID: *resourceId, + ResourceRaw: resourceEntry.Resource, + }, + ) + require.NoError(suite.T(), err) + require.True(suite.T(), wasCreated) + } + + //test + sourceSummary, err := dbRepo.GetSourceSummary(authContext, testSourceCredential.ID.String()) + require.NoError(suite.T(), err) + require.NotNil(suite.T(), sourceSummary) + //validated using https://www.maxmddirect.com/direct/FHIR/ResponseViewer + require.Equal(suite.T(), []map[string]interface{}{ + {"count": int64(1), "resource_type": "CarePlan", "source_id": testSourceCredential.ID.String()}, + {"count": int64(1), "resource_type": "CareTeam", "source_id": testSourceCredential.ID.String()}, + {"count": int64(22), "resource_type": "Claim", "source_id": testSourceCredential.ID.String()}, + {"count": int64(8), "resource_type": "Condition", "source_id": testSourceCredential.ID.String()}, + {"count": int64(2), "resource_type": "DiagnosticReport", "source_id": testSourceCredential.ID.String()}, + {"count": int64(18), "resource_type": "Encounter", "source_id": testSourceCredential.ID.String()}, + {"count": int64(18), "resource_type": "ExplanationOfBenefit", "source_id": testSourceCredential.ID.String()}, + {"count": int64(16), "resource_type": "Immunization", "source_id": testSourceCredential.ID.String()}, + {"count": int64(4), "resource_type": "MedicationRequest", "source_id": testSourceCredential.ID.String()}, + {"count": int64(93), "resource_type": "Observation", "source_id": testSourceCredential.ID.String()}, + {"count": int64(3), "resource_type": "Organization", "source_id": testSourceCredential.ID.String()}, + {"count": int64(1), "resource_type": "Patient", "source_id": testSourceCredential.ID.String()}, + {"count": int64(3), "resource_type": "Practitioner", "source_id": testSourceCredential.ID.String()}, + {"count": int64(8), "resource_type": "Procedure", "source_id": 
testSourceCredential.ID.String()}, + }, sourceSummary.ResourceTypeCounts) + require.Equal(suite.T(), "b426b062-8273-4b93-a907-de3176c0567d", sourceSummary.Patient.SourceResourceID) + require.Equal(suite.T(), "Patient", sourceSummary.Patient.SourceResourceType) + require.NotEmpty(suite.T(), sourceSummary.Patient.ResourceRaw) +} + +func (suite *RepositoryTestSuite) TestGetSummary() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + testSourceCredential1 := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.New(), + }, + UserID: userModel.ID, + Patient: uuid.New().String(), + } + err = dbRepo.CreateSource(authContext, &testSourceCredential1) + require.NoError(suite.T(), err) + + testSourceCredential2 := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.New(), + }, + UserID: userModel.ID, + Patient: uuid.New().String(), + } + err = dbRepo.CreateSource(authContext, &testSourceCredential2) + require.NoError(suite.T(), err) + + testPatientData, err := os.ReadFile("./testdata/Abraham100_Heller342_262b819a-5193-404a-9787-b7f599358035.json") + require.NoError(suite.T(), err) + + var testPatientBundle fhir401.Bundle + err = json.Unmarshal(testPatientData, &testPatientBundle) + require.NoError(suite.T(), err) + + for _, resourceEntry := range testPatientBundle.Entry { + + fhirResource, _ := fhirutils.MapToResource(resourceEntry.Resource, false) + resourceType, resourceId := fhirResource.(sourceModels.ResourceInterface).ResourceRef() + if resourceId == nil { + suite.T().Logf("skipping resource with no ID: %s", resourceType) + continue //skip resources missing an ID + } + wasCreated, err := dbRepo.UpsertRawResource( + authContext, + &testSourceCredential1, + sourceModels.RawResourceFhir{ + SourceResourceType: resourceType, + SourceResourceID: *resourceId, + ResourceRaw: resourceEntry.Resource, + }, + ) + require.NoError(suite.T(), err) + require.True(suite.T(), wasCreated) + + wasCreated2, err2 := dbRepo.UpsertRawResource( + authContext, + &testSourceCredential2, + sourceModels.RawResourceFhir{ + SourceResourceType: resourceType, + SourceResourceID: *resourceId + "2", + ResourceRaw: resourceEntry.Resource, + }, + ) + require.NoError(suite.T(), err2) + require.True(suite.T(), wasCreated2) + } + + //test + sourceSummary, err := dbRepo.GetSummary(authContext) + require.NoError(suite.T(), err) + require.NotNil(suite.T(), sourceSummary) + //validated using https://www.maxmddirect.com/direct/FHIR/ResponseViewer + require.Equal(suite.T(), []map[string]interface{}{ + {"count": int64(2), "resource_type": "CarePlan"}, + {"count": int64(2), "resource_type": "CareTeam"}, + {"count": int64(44), "resource_type": "Claim"}, + {"count": int64(16), "resource_type": "Condition"}, + {"count": int64(4), "resource_type": "DiagnosticReport"}, + {"count": 
int64(36), "resource_type": "Encounter"}, + {"count": int64(36), "resource_type": "ExplanationOfBenefit"}, + {"count": int64(32), "resource_type": "Immunization"}, + {"count": int64(8), "resource_type": "MedicationRequest"}, + {"count": int64(93 * 2), "resource_type": "Observation"}, + {"count": int64(6), "resource_type": "Organization"}, + {"count": int64(2), "resource_type": "Patient"}, + {"count": int64(6), "resource_type": "Practitioner"}, + {"count": int64(16), "resource_type": "Procedure"}, + }, sourceSummary.ResourceTypeCounts) + + require.Equal(suite.T(), 2, len(sourceSummary.Sources)) + require.Equal(suite.T(), 2, len(sourceSummary.Patients)) +} + +func (suite *RepositoryTestSuite) TestAddResourceComposition() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + testSourceCredential := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.New(), + }, + UserID: userModel.ID, + Patient: uuid.New().String(), + } + err = dbRepo.CreateSource(authContext, &testSourceCredential) + require.NoError(suite.T(), err) + + //test + testCompositionData, err := os.ReadFile("./testdata/Composition_Create.json") + require.NoError(suite.T(), err) + + type CompositionPayload struct { + Title string `json:"title"` + Resources []*models.ResourceBase `json:"resources"` + } + var compositionPayload CompositionPayload + err = json.Unmarshal(testCompositionData, &compositionPayload) + require.NoError(suite.T(), err) + + //update resources with testSource Credential + for i, _ := range compositionPayload.Resources { + compositionPayload.Resources[i].SourceID = testSourceCredential.ID + } + err = dbRepo.AddResourceComposition(authContext, compositionPayload.Title, compositionPayload.Resources) + require.NoError(suite.T(), err) + + //assert + //check that composition was created + compositions, err := dbRepo.ListResources(authContext, models.ListResourceQueryOptions{ + SourceID: "00000000-0000-0000-0000-000000000000", + SourceResourceType: "Composition", + }) + require.NoError(suite.T(), err) + require.Equal(suite.T(), 1, len(compositions)) + + //assert that the associations were created + associations, err := dbRepo.FindResourceAssociationsByTypeAndId(authContext, + &models.SourceCredential{UserID: userModel.ID, ModelBase: models.ModelBase{ID: uuid.MustParse("00000000-0000-0000-0000-000000000000")}}, //Compositions have a unique/placeholder credential ID + "Composition", compositions[0].SourceResourceID) + require.NoError(suite.T(), err) + require.Equal(suite.T(), 2, len(associations)) + require.Equal(suite.T(), []models.RelatedResource{ + { + ResourceBaseUserID: testSourceCredential.UserID, + ResourceBaseSourceID: compositions[0].SourceID, + ResourceBaseSourceResourceType: "Composition", + ResourceBaseSourceResourceID: compositions[0].SourceResourceID, + 
RelatedResourceUserID: testSourceCredential.UserID, + RelatedResourceSourceID: testSourceCredential.ID, + RelatedResourceSourceResourceType: "Condition", + RelatedResourceSourceResourceID: "bec92fdc-8765-409b-9850-52786d31aa9b", + }, + { + ResourceBaseUserID: testSourceCredential.UserID, + ResourceBaseSourceID: compositions[0].SourceID, + ResourceBaseSourceResourceType: "Composition", + ResourceBaseSourceResourceID: compositions[0].SourceResourceID, + RelatedResourceUserID: testSourceCredential.UserID, + RelatedResourceSourceID: testSourceCredential.ID, + RelatedResourceSourceResourceType: "Condition", + RelatedResourceSourceResourceID: "cf39b665-4177-41e3-af34-149421cb895f", + }, + }, associations) +} + +func (suite *RepositoryTestSuite) TestAddResourceComposition_WithExistingComposition() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + testSourceCredential := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.MustParse("00000000-0000-0000-0000-000000000000"), + }, + UserID: userModel.ID, + Patient: uuid.New().String(), + } + err = dbRepo.CreateSource(authContext, &testSourceCredential) + require.NoError(suite.T(), err) + + //create existing composition + emptyRawJson, err := json.Marshal(map[string]interface{}{}) + require.NoError(suite.T(), err) + err = dbRepo.AddResourceComposition(authContext, "existing composition", []*models.ResourceBase{ + { + OriginBase: models.OriginBase{ + SourceID: testSourceCredential.ID, + SourceResourceType: "Observation", + SourceResourceID: "1", + }, + ResourceRaw: emptyRawJson, + }, + { + OriginBase: models.OriginBase{ + SourceID: testSourceCredential.ID, + SourceResourceType: "Observation", + SourceResourceID: "2", + }, + ResourceRaw: emptyRawJson, + }, + { + OriginBase: models.OriginBase{ + SourceID: testSourceCredential.ID, + SourceResourceType: "Observation", + SourceResourceID: "3", + }, + ResourceRaw: emptyRawJson, + }, + }) + + require.NoError(suite.T(), err) + + //find existing composition + existingCompositions, err := dbRepo.ListResources(authContext, models.ListResourceQueryOptions{ + SourceID: "00000000-0000-0000-0000-000000000000", + SourceResourceType: "Composition", + }) + require.NoError(suite.T(), err) + require.Equal(suite.T(), 1, len(existingCompositions), "Only 1 composition should exist at this point") + + //test + testCompositionData, err := os.ReadFile("./testdata/Composition_Create.json") + require.NoError(suite.T(), err) + + type CompositionPayload struct { + Title string `json:"title"` + Resources []*models.ResourceBase `json:"resources"` + } + var compositionPayload CompositionPayload + err = json.Unmarshal(testCompositionData, &compositionPayload) + require.NoError(suite.T(), err) + + //update resources with testSource Credential + for i, _ := range compositionPayload.Resources 
{ + compositionPayload.Resources[i].SourceID = testSourceCredential.ID + } + //add the existing composition as a resource to this composition + compositionPayload.Resources = append(compositionPayload.Resources, &existingCompositions[0]) + + //test + err = dbRepo.AddResourceComposition(authContext, compositionPayload.Title, compositionPayload.Resources) + require.NoError(suite.T(), err) + + //assert + //check that composition was created + compositions, err := dbRepo.ListResources(authContext, models.ListResourceQueryOptions{ + SourceID: "00000000-0000-0000-0000-000000000000", + SourceResourceType: "Composition", + }) + require.NoError(suite.T(), err) + require.Equal(suite.T(), 1, len(compositions), "Only 1 composition should exist, the previous one should be deleted, and its related resources merged into this one.") + + //assert that the associations were created + associations, err := dbRepo.FindResourceAssociationsByTypeAndId(authContext, + &models.SourceCredential{UserID: userModel.ID, ModelBase: models.ModelBase{ID: uuid.MustParse("00000000-0000-0000-0000-000000000000")}}, //Compositions have a unique/placeholder credential ID + "Composition", compositions[0].SourceResourceID) + require.NoError(suite.T(), err) + require.Equal(suite.T(), 5, len(associations)) + require.Equal(suite.T(), []models.RelatedResource{ + { + ResourceBaseUserID: testSourceCredential.UserID, + ResourceBaseSourceID: compositions[0].SourceID, + ResourceBaseSourceResourceType: "Composition", + ResourceBaseSourceResourceID: compositions[0].SourceResourceID, + RelatedResourceUserID: testSourceCredential.UserID, + RelatedResourceSourceID: testSourceCredential.ID, + RelatedResourceSourceResourceType: "Condition", + RelatedResourceSourceResourceID: "bec92fdc-8765-409b-9850-52786d31aa9b", + }, + { + ResourceBaseUserID: testSourceCredential.UserID, + ResourceBaseSourceID: compositions[0].SourceID, + ResourceBaseSourceResourceType: "Composition", + ResourceBaseSourceResourceID: compositions[0].SourceResourceID, + RelatedResourceUserID: testSourceCredential.UserID, + RelatedResourceSourceID: testSourceCredential.ID, + RelatedResourceSourceResourceType: "Condition", + RelatedResourceSourceResourceID: "cf39b665-4177-41e3-af34-149421cb895f", + }, + { + ResourceBaseUserID: testSourceCredential.UserID, + ResourceBaseSourceID: compositions[0].SourceID, + ResourceBaseSourceResourceType: "Composition", + ResourceBaseSourceResourceID: compositions[0].SourceResourceID, + RelatedResourceUserID: testSourceCredential.UserID, + RelatedResourceSourceID: testSourceCredential.ID, + RelatedResourceSourceResourceType: "Observation", + RelatedResourceSourceResourceID: "1", + }, + { + ResourceBaseUserID: testSourceCredential.UserID, + ResourceBaseSourceID: compositions[0].SourceID, + ResourceBaseSourceResourceType: "Composition", + ResourceBaseSourceResourceID: compositions[0].SourceResourceID, + RelatedResourceUserID: testSourceCredential.UserID, + RelatedResourceSourceID: testSourceCredential.ID, + RelatedResourceSourceResourceType: "Observation", + RelatedResourceSourceResourceID: "2", + }, + { + ResourceBaseUserID: testSourceCredential.UserID, + ResourceBaseSourceID: compositions[0].SourceID, + ResourceBaseSourceResourceType: "Composition", + ResourceBaseSourceResourceID: compositions[0].SourceResourceID, + RelatedResourceUserID: testSourceCredential.UserID, + RelatedResourceSourceID: testSourceCredential.ID, + RelatedResourceSourceResourceType: "Observation", + RelatedResourceSourceResourceID: "3", + }, + }, associations) + +} + +func 
(suite *RepositoryTestSuite) TestCreateBackgroundJob_Sync() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + //test + sourceCredential := models.SourceCredential{ModelBase: models.ModelBase{ID: uuid.New()}, SourceType: sourcePkg.SourceType("bluebutton")} + backgroundJob := models.NewSyncBackgroundJob(sourceCredential) + err = dbRepo.CreateBackgroundJob( + context.WithValue(authContext, pkg.ContextKeyTypeAuthUsername, "test_username"), + backgroundJob, + ) + + //assert + require.NoError(suite.T(), err) + require.NotEqual(suite.T(), uuid.Nil, backgroundJob.ID) + require.Equal(suite.T(), pkg.BackgroundJobTypeSync, backgroundJob.JobType) + require.Equal(suite.T(), pkg.BackgroundJobStatusLocked, backgroundJob.JobStatus) + require.NotNil(suite.T(), backgroundJob.LockedTime) + require.Nil(suite.T(), backgroundJob.DoneTime) + require.Equal(suite.T(), userModel.ID, backgroundJob.UserID) +} + +func (suite *RepositoryTestSuite) TestListBackgroundJobs() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + otherUserModel := &models.User{ + Username: "test_other_username", + Password: "testpassword", + Email: "testother@test.com", + } + err = dbRepo.CreateUser(context.Background(), otherUserModel) + require.NoError(suite.T(), err) + + testSourceCredential := models.SourceCredential{ + ModelBase: models.ModelBase{ + ID: uuid.New(), + }, + UserID: userModel.ID, + } + + backgroundJob := models.NewSyncBackgroundJob(testSourceCredential) + err = dbRepo.CreateBackgroundJob( + context.WithValue(authContext, pkg.ContextKeyTypeAuthUsername, "test_username"), + backgroundJob, + ) + + backgroundJob2 := models.NewSyncBackgroundJob(testSourceCredential) + backgroundJob2.JobType = pkg.BackgroundJobTypeScheduledSync + err = dbRepo.CreateBackgroundJob( + context.WithValue(authContext, pkg.ContextKeyTypeAuthUsername, "test_username"), + backgroundJob2, + ) + + backgroundJob3 := models.NewSyncBackgroundJob(testSourceCredential) + backgroundJob3.JobStatus = pkg.BackgroundJobStatusFailed + err = dbRepo.CreateBackgroundJob( + context.WithValue(authContext, 
pkg.ContextKeyTypeAuthUsername, "test_username"), + backgroundJob3, + ) + + require.NoError(suite.T(), err) + + //test + foundAllBackgroundJobs, err := dbRepo.ListBackgroundJobs(authContext, models.BackgroundJobQueryOptions{}) + require.NoError(suite.T(), err) + + syncJobType := pkg.BackgroundJobTypeSync + foundBackgroundJobsByType, err := dbRepo.ListBackgroundJobs(authContext, models.BackgroundJobQueryOptions{ + JobType: &syncJobType, + }) + require.NoError(suite.T(), err) + + syncFailedStatus := pkg.BackgroundJobStatusFailed + foundBackgroundJobsByStatus, err := dbRepo.ListBackgroundJobs(authContext, models.BackgroundJobQueryOptions{ + Status: &syncFailedStatus, + }) + require.NoError(suite.T(), err) + + //assert + require.Equal(suite.T(), len(foundAllBackgroundJobs), 3) + require.Equal(suite.T(), len(foundBackgroundJobsByType), 2) + require.Equal(suite.T(), len(foundBackgroundJobsByStatus), 1) +} + +func (suite *RepositoryTestSuite) TestUpdateBackgroundJob() { + //setup + fakeConfig := mock_config.NewMockInterface(suite.MockCtrl) + fakeConfig.EXPECT().GetString("database.location").Return(suite.TestDatabase.Name()).AnyTimes() + fakeConfig.EXPECT().GetString("database.type").Return("sqlite").AnyTimes() + fakeConfig.EXPECT().GetString("log.level").Return("INFO").AnyTimes() + dbRepo, err := NewRepository(fakeConfig, logrus.WithField("test", suite.T().Name()), event_bus.NewNoopEventBusServer()) + require.NoError(suite.T(), err) + + userModel := &models.User{ + Username: "test_username", + Password: "testpassword", + Email: "test@test.com", + } + err = dbRepo.CreateUser(context.Background(), userModel) + require.NoError(suite.T(), err) + authContext := context.WithValue(context.Background(), pkg.ContextKeyTypeAuthUsername, "test_username") + + sourceCredential := models.SourceCredential{ModelBase: models.ModelBase{ID: uuid.New()}, SourceType: sourcePkg.SourceType("bluebutton")} + backgroundJob := models.NewSyncBackgroundJob(sourceCredential) + err = dbRepo.CreateBackgroundJob( + context.WithValue(authContext, pkg.ContextKeyTypeAuthUsername, "test_username"), + backgroundJob, + ) + + //test + now := time.Now() + backgroundJob.JobStatus = pkg.BackgroundJobStatusFailed + backgroundJob.DoneTime = &now + + err = dbRepo.UpdateBackgroundJob( + authContext, + backgroundJob, + ) + require.NoError(suite.T(), err) + + //list all records and ensure that the updated record is the same + foundAllBackgroundJobs, err := dbRepo.ListBackgroundJobs(authContext, models.BackgroundJobQueryOptions{}) + require.NoError(suite.T(), err) + + //assert + require.Equal(suite.T(), 1, len(foundAllBackgroundJobs)) + require.Equal(suite.T(), backgroundJob.ID, foundAllBackgroundJobs[0].ID) + require.Equal(suite.T(), pkg.BackgroundJobStatusFailed, foundAllBackgroundJobs[0].JobStatus) + require.NotNil(suite.T(), foundAllBackgroundJobs[0].DoneTime) +} diff --git a/backend/pkg/database/postgres_repository.go b/backend/pkg/database/postgres_repository.go new file mode 100644 index 00000000..dd384441 --- /dev/null +++ b/backend/pkg/database/postgres_repository.go @@ -0,0 +1,107 @@ +package database + +import ( + "fmt" + "strings" + + "github.com/fastenhealth/fasten-onprem/backend/pkg/config" + "github.com/fastenhealth/fasten-onprem/backend/pkg/event_bus" + "github.com/fastenhealth/fasten-onprem/backend/pkg/models" + databaseModel "github.com/fastenhealth/fasten-onprem/backend/pkg/models/database" + "github.com/glebarez/sqlite" + "github.com/sirupsen/logrus" + "gorm.io/gorm" +) + +func newPostgresRepository(appConfig 
config.Interface, globalLogger logrus.FieldLogger, eventBus event_bus.Interface) (DatabaseRepository, error) {
+ //backgroundContext := context.Background()
+
+ ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ // Gorm/PostgreSQL setup
+ ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ globalLogger.Infof("Trying to connect to postgres db: %s\n", appConfig.GetString("database.location"))
+
+ // NOTE: the SQLite-specific connection tuning documented in sqlite_repository.go (PRAGMA busy_timeout,
+ // WAL journal mode, foreign_keys pragma) does not apply to Postgres; the configured database.location
+ // is used as the DSN as-is.
+ dsn := appConfig.GetString("database.location")
+ database, err := gorm.Open(sqlite.Open(dsn), &gorm.Config{
+ //TODO: figure out how to log database queries again.
+ //logger: logger
+ DisableForeignKeyConstraintWhenMigrating: true,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("Failed to connect to database! - %v", err)
+ }
+
+ if strings.ToUpper(appConfig.GetString("log.level")) == "DEBUG" {
+ database = database.Debug() //set debug globally
+ }
+ globalLogger.Infof("Successfully connected to fasten postgres db: %s\n", dsn)
+
+ fastenRepo := GormRepository{
+ AppConfig: appConfig,
+ Logger: globalLogger,
+ GormClient: database,
+ EventBus: eventBus,
+ }
+
+ //TODO: automigrate for now, this should be replaced with a migration tool once the DB has stabilized.
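+ // NOTE: the dialector above is still github.com/glebarez/sqlite. A minimal sketch of opening an actual
+ // Postgres database with gorm's postgres driver (assuming "gorm.io/driver/postgres" is added to the import
+ // block, and that database.location holds a Postgres DSN such as
+ // "host=localhost user=fasten dbname=fasten port=5432 sslmode=disable"):
+ //
+ //   database, err := gorm.Open(postgres.Open(dsn), &gorm.Config{
+ //       DisableForeignKeyConstraintWhenMigrating: true,
+ //   })
+ //
+ // The remainder of this constructor (GormRepository wiring, migrations, admin user bootstrap, failing
+ // locked background jobs) is driver-agnostic.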
+ err = fastenRepo.Migrate() + if err != nil { + return nil, err + } + + //automigrate Fhir Resource Tables + err = databaseModel.Migrate(fastenRepo.GormClient) + if err != nil { + return nil, err + } + + // create/update admin user + //TODO: determine if this admin user is ncessary + //SECURITY: validate this user is necessary + adminUser := models.User{} + err = database.FirstOrCreate(&adminUser, models.User{Username: "admin"}).Error + if err != nil { + return nil, fmt.Errorf("Failed to create admin user! - %v", err) + } + + //fail any Locked jobs. This is necessary because the job may have been locked by a process that was killed. + err = fastenRepo.CancelAllLockedBackgroundJobsAndFail() + if err != nil { + return nil, err + } + + return &fastenRepo, nil +} diff --git a/backend/pkg/database/sqlite_repository.go b/backend/pkg/database/sqlite_repository.go index a8c90fd4..45949bc7 100644 --- a/backend/pkg/database/sqlite_repository.go +++ b/backend/pkg/database/sqlite_repository.go @@ -1,26 +1,16 @@ package database import ( - "context" - "encoding/json" - "errors" "fmt" - "github.com/fastenhealth/fasten-onprem/backend/pkg" + "strings" + "github.com/fastenhealth/fasten-onprem/backend/pkg/config" "github.com/fastenhealth/fasten-onprem/backend/pkg/event_bus" "github.com/fastenhealth/fasten-onprem/backend/pkg/models" databaseModel "github.com/fastenhealth/fasten-onprem/backend/pkg/models/database" - "github.com/fastenhealth/fasten-onprem/backend/pkg/utils" - sourceModel "github.com/fastenhealth/fasten-sources/clients/models" - "github.com/gin-gonic/gin" "github.com/glebarez/sqlite" - "github.com/google/uuid" "github.com/sirupsen/logrus" - "gorm.io/datatypes" "gorm.io/gorm" - "net/url" - "strings" - "time" ) func newSqliteRepository(appConfig config.Interface, globalLogger logrus.FieldLogger, eventBus event_bus.Interface) (DatabaseRepository, error) { @@ -78,7 +68,7 @@ func newSqliteRepository(appConfig config.Interface, globalLogger logrus.FieldLo // globalLogger.Infof("Journal mode: %v", journalMode) //} - fastenRepo := SqliteRepository{ + fastenRepo := GormRepository{ AppConfig: appConfig, Logger: globalLogger, GormClient: database, @@ -114,1096 +104,3 @@ func newSqliteRepository(appConfig config.Interface, globalLogger logrus.FieldLo return &fastenRepo, nil } - -type SqliteRepository struct { - AppConfig config.Interface - Logger logrus.FieldLogger - - GormClient *gorm.DB - - EventBus event_bus.Interface -} - -func (sr *SqliteRepository) Migrate() error { - err := sr.GormClient.AutoMigrate( - &models.User{}, - &models.SourceCredential{}, - &models.BackgroundJob{}, - &models.Glossary{}, - &models.UserSettingEntry{}, - ) - if err != nil { - return fmt.Errorf("Failed to automigrate! 
- %v", err) - } - return nil -} - -func (sr *SqliteRepository) Close() error { - return nil -} - -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// User -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - -func (sr *SqliteRepository) CreateUser(ctx context.Context, user *models.User) error { - if err := user.HashPassword(user.Password); err != nil { - return err - } - record := sr.GormClient.Create(user) - if record.Error != nil { - return record.Error - } - - //create user settings - err := sr.PopulateDefaultUserSettings(ctx, user.ID) - if err != nil { - return err - } - return nil -} -func (sr *SqliteRepository) GetUserByUsername(ctx context.Context, username string) (*models.User, error) { - var foundUser models.User - result := sr.GormClient.WithContext(ctx).Where(models.User{Username: username}).First(&foundUser) - return &foundUser, result.Error -} - -// TODO: check for error, right now we return a nil which may cause a panic. -// TODO: can we cache the current user? //SECURITY: -func (sr *SqliteRepository) GetCurrentUser(ctx context.Context) (*models.User, error) { - username := ctx.Value(pkg.ContextKeyTypeAuthUsername) - if username == nil { - ginCtx, ginCtxOk := ctx.(*gin.Context) - if !ginCtxOk { - return nil, fmt.Errorf("could not convert context to gin context") - } - var exists bool - username, exists = ginCtx.Get(pkg.ContextKeyTypeAuthUsername) - if !exists { - return nil, fmt.Errorf("could not extract username from context") - } - } - - var currentUser models.User - usernameStr, usernameStrOk := username.(string) - if !usernameStrOk { - return nil, fmt.Errorf("could not convert username to string: %v", username) - } - - result := sr.GormClient. - WithContext(ctx). - First(¤tUser, map[string]interface{}{"username": usernameStr}) - - if result.Error != nil { - return nil, fmt.Errorf("could not retrieve current user: %v", result.Error) - } - - return ¤tUser, nil -} - -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// Glossary -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - -func (sr *SqliteRepository) CreateGlossaryEntry(ctx context.Context, glossaryEntry *models.Glossary) error { - record := sr.GormClient.WithContext(ctx).Create(glossaryEntry) - if record.Error != nil { - return record.Error - } - return nil -} - -func (sr *SqliteRepository) GetGlossaryEntry(ctx context.Context, code string, codeSystem string) (*models.Glossary, error) { - var foundGlossaryEntry models.Glossary - result := sr.GormClient.WithContext(ctx). - Where(models.Glossary{Code: code, CodeSystem: codeSystem}). 
- First(&foundGlossaryEntry) - return &foundGlossaryEntry, result.Error -} - -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// Summary -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - -func (sr *SqliteRepository) GetSummary(ctx context.Context) (*models.Summary, error) { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return nil, currentUserErr - } - - // we want a count of all resources for this user by type - var resourceCountResults []map[string]interface{} - - resourceTypes := databaseModel.GetAllowedResourceTypes() - for _, resourceType := range resourceTypes { - tableName, err := databaseModel.GetTableNameByResourceType(resourceType) - if err != nil { - return nil, err - } - var count int64 - result := sr.GormClient.WithContext(ctx). - Table(tableName). - Where(models.OriginBase{ - UserID: currentUser.ID, - }). - Count(&count) - if result.Error != nil { - return nil, result.Error - } - if count == 0 { - continue //don't add resource counts if the count is 0 - } - resourceCountResults = append(resourceCountResults, map[string]interface{}{ - "resource_type": resourceType, - "count": count, - }) - } - - // we want a list of all sources (when they were last updated) - sources, err := sr.GetSources(ctx) - if err != nil { - return nil, err - } - - // we want the main Patient for each source - patients, err := sr.GetPatientForSources(ctx) - if err != nil { - return nil, err - } - - if resourceCountResults == nil { - resourceCountResults = []map[string]interface{}{} - } - summary := &models.Summary{ - Sources: sources, - ResourceTypeCounts: resourceCountResults, - Patients: patients, - } - - return summary, nil -} - -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// Resource -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - -// This function will create a new resource if it does not exist, or update an existing resource if it does exist. -// It will also create associations between fhir resources -// This function is called directly by fasten-sources -func (sr *SqliteRepository) UpsertRawResource(ctx context.Context, sourceCredential sourceModel.SourceCredential, rawResource sourceModel.RawResourceFhir) (bool, error) { - - source := sourceCredential.(*models.SourceCredential) - - //convert from a raw resource (from fasten-sources) to a ResourceFhir (which matches the database models) - wrappedResourceModel := &models.ResourceBase{ - OriginBase: models.OriginBase{ - ModelBase: models.ModelBase{}, - UserID: source.UserID, - SourceID: source.ID, - SourceResourceID: rawResource.SourceResourceID, - SourceResourceType: rawResource.SourceResourceType, - }, - SortTitle: rawResource.SortTitle, - SortDate: rawResource.SortDate, - ResourceRaw: datatypes.JSON(rawResource.ResourceRaw), - RelatedResource: nil, - } - if len(rawResource.SourceUri) > 0 { - wrappedResourceModel.SourceUri = &rawResource.SourceUri - } - - //create associations - //note: we create the association in the related_resources table **before** the model actually exists. - //note: these associations are not reciprocal, (i.e. 
if Procedure references Location, Location may not reference Procedure) - if rawResource.ReferencedResources != nil && len(rawResource.ReferencedResources) > 0 { - for _, referencedResource := range rawResource.ReferencedResources { - parts := strings.Split(referencedResource, "/") - if len(parts) != 2 { - continue - } - - relatedResource := &models.ResourceBase{ - OriginBase: models.OriginBase{ - SourceID: source.ID, - SourceResourceType: parts[0], - SourceResourceID: parts[1], - }, - RelatedResource: nil, - } - err := sr.AddResourceAssociation( - ctx, - source, - wrappedResourceModel.SourceResourceType, - wrappedResourceModel.SourceResourceID, - source, - relatedResource.SourceResourceType, - relatedResource.SourceResourceID, - ) - if err != nil { - return false, err - } - } - } - - return sr.UpsertResource(ctx, wrappedResourceModel) - -} - -// UpsertResource -// this method will upsert a resource, however it will not create associations. -// UPSERT operation -// - call FindOrCreate -// - check if the resource exists -// - if it does not exist, insert it -// -// - if no error during FindOrCreate && no rows affected (nothing was created) -// - update the resource using Updates operation -func (sr *SqliteRepository) UpsertResource(ctx context.Context, wrappedResourceModel *models.ResourceBase) (bool, error) { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return false, currentUserErr - } - - wrappedResourceModel.UserID = currentUser.ID - cachedResourceRaw := wrappedResourceModel.ResourceRaw - - sr.Logger.Infof("insert/update FHIRResource (%v) %v", wrappedResourceModel.SourceResourceType, wrappedResourceModel.SourceResourceID) - wrappedFhirResourceModel, err := databaseModel.NewFhirResourceModelByType(wrappedResourceModel.SourceResourceType) - if err != nil { - return false, err - } - - wrappedFhirResourceModel.SetOriginBase(wrappedResourceModel.OriginBase) - wrappedFhirResourceModel.SetSortTitle(wrappedResourceModel.SortTitle) - wrappedFhirResourceModel.SetSortDate(wrappedResourceModel.SortDate) - wrappedFhirResourceModel.SetSourceUri(wrappedResourceModel.SourceUri) - - //TODO: this takes too long, we need to find a way to do this processing faster or in the background async. 
- err = wrappedFhirResourceModel.PopulateAndExtractSearchParameters(json.RawMessage(wrappedResourceModel.ResourceRaw)) - if err != nil { - sr.Logger.Warnf("ignoring: an error occurred while extracting SearchParameters using FHIRPath (%s/%s): %v", wrappedResourceModel.SourceResourceType, wrappedResourceModel.SourceResourceID, err) - //wrappedFhirResourceModel.SetResourceRaw(wrappedResourceModel.ResourceRaw) - } - - eventSourceSync := models.NewEventSourceSync( - currentUser.ID.String(), - wrappedFhirResourceModel.GetSourceID().String(), - wrappedFhirResourceModel.GetSourceResourceType(), - wrappedFhirResourceModel.GetSourceResourceID(), - ) - - err = sr.EventBus.PublishMessage(eventSourceSync) - if err != nil { - sr.Logger.Warnf("ignoring: an error occurred while publishing event to eventBus (%s/%s): %v", wrappedResourceModel.SourceResourceType, wrappedResourceModel.SourceResourceID, err) - } - - createResult := sr.GormClient.WithContext(ctx).Where(models.OriginBase{ - SourceID: wrappedFhirResourceModel.GetSourceID(), - SourceResourceID: wrappedFhirResourceModel.GetSourceResourceID(), - SourceResourceType: wrappedFhirResourceModel.GetSourceResourceType(), //TODO: and UpdatedAt > old UpdatedAt - }).Omit("RelatedResource.*").FirstOrCreate(wrappedFhirResourceModel) - - if createResult.Error != nil { - return false, createResult.Error - } else if createResult.RowsAffected == 0 { - //at this point, wrappedResourceModel contains the data found in the database. - // check if the database resource matches the new resource. - if wrappedResourceModel.ResourceRaw.String() != string(cachedResourceRaw) { - updateResult := createResult.Omit("RelatedResource.*").Updates(wrappedResourceModel) - return updateResult.RowsAffected > 0, updateResult.Error - } else { - return false, nil - } - - } else { - //resource was created - return createResult.RowsAffected > 0, createResult.Error - } -} - -func (sr *SqliteRepository) ListResources(ctx context.Context, queryOptions models.ListResourceQueryOptions) ([]models.ResourceBase, error) { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return nil, currentUserErr - } - - queryParam := models.OriginBase{ - UserID: currentUser.ID, - } - - if len(queryOptions.SourceResourceType) > 0 { - queryParam.SourceResourceType = queryOptions.SourceResourceType - } - - if len(queryOptions.SourceID) > 0 { - sourceUUID, err := uuid.Parse(queryOptions.SourceID) - if err != nil { - return nil, err - } - - queryParam.SourceID = sourceUUID - } - if len(queryOptions.SourceResourceID) > 0 { - queryParam.SourceResourceID = queryOptions.SourceResourceID - } - - manifestJson, _ := json.MarshalIndent(queryParam, "", " ") - sr.Logger.Debugf("THE QUERY OBJECT===========> %v", string(manifestJson)) - - var wrappedResourceModels []models.ResourceBase - queryBuilder := sr.GormClient.WithContext(ctx) - if len(queryOptions.SourceResourceType) > 0 { - tableName, err := databaseModel.GetTableNameByResourceType(queryOptions.SourceResourceType) - if err != nil { - return nil, err - } - queryBuilder = queryBuilder. - Where(queryParam). 
- Table(tableName) - - if queryOptions.Limit > 0 { - queryBuilder = queryBuilder.Limit(queryOptions.Limit).Offset(queryOptions.Offset) - } - return wrappedResourceModels, queryBuilder.Find(&wrappedResourceModels).Error - } else { - if queryOptions.Limit > 0 { - queryBuilder = queryBuilder.Limit(queryOptions.Limit).Offset(queryOptions.Offset) - } - //there is no FHIR Resource name specified, so we're querying across all FHIR resources - return sr.getResourcesFromAllTables(queryBuilder, queryParam) - } -} - -// TODO: should this be deprecated? (replaced by ListResources) -func (sr *SqliteRepository) GetResourceByResourceTypeAndId(ctx context.Context, sourceResourceType string, sourceResourceId string) (*models.ResourceBase, error) { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return nil, currentUserErr - } - - tableName, err := databaseModel.GetTableNameByResourceType(sourceResourceType) - if err != nil { - return nil, err - } - - queryParam := models.OriginBase{ - UserID: currentUser.ID, - SourceResourceType: sourceResourceType, - SourceResourceID: sourceResourceId, - } - - var wrappedResourceModel models.ResourceBase - results := sr.GormClient.WithContext(ctx). - Where(queryParam). - Table(tableName). - First(&wrappedResourceModel) - - return &wrappedResourceModel, results.Error -} - -// we need to figure out how to get the source resource type from the source resource id, or if we're searching across every table :( -func (sr *SqliteRepository) GetResourceBySourceId(ctx context.Context, sourceId string, sourceResourceId string) (*models.ResourceBase, error) { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return nil, currentUserErr - } - - sourceIdUUID, err := uuid.Parse(sourceId) - if err != nil { - return nil, err - } - - queryParam := models.OriginBase{ - UserID: currentUser.ID, - SourceID: sourceIdUUID, - SourceResourceID: sourceResourceId, - } - - //there is no FHIR Resource name specified, so we're querying across all FHIR resources - wrappedResourceModels, err := sr.getResourcesFromAllTables(sr.GormClient.WithContext(ctx), queryParam) - if len(wrappedResourceModels) > 0 { - return &wrappedResourceModels[0], err - } else { - return nil, fmt.Errorf("no resource found with source id %s and source resource id %s", sourceId, sourceResourceId) - } -} - -// Get the patient for each source (for the current user) -func (sr *SqliteRepository) GetPatientForSources(ctx context.Context) ([]models.ResourceBase, error) { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return nil, currentUserErr - } - - //SELECT * FROM resource_bases WHERE user_id = "" and source_resource_type = "Patient" GROUP BY source_id - - tableName, err := databaseModel.GetTableNameByResourceType("Patient") - if err != nil { - return nil, err - } - - var wrappedResourceModels []models.ResourceBase - results := sr.GormClient.WithContext(ctx). - //Group("source_id"). //broken in Postgres. - Where(models.OriginBase{ - UserID: currentUser.ID, - SourceResourceType: "Patient", - }). - Table(tableName). 
- Find(&wrappedResourceModels) - - return wrappedResourceModels, results.Error -} - -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// Resource Associations -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - -// verifyAssociationPermission ensure that the sources are "owned" by the same user, and that the user is the current user -func (sr *SqliteRepository) verifyAssociationPermission(ctx context.Context, sourceUserID uuid.UUID, relatedSourceUserID uuid.UUID) error { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return currentUserErr - } - if sourceUserID != relatedSourceUserID { - return fmt.Errorf("user id's must match when adding associations") - } else if sourceUserID != currentUser.ID { - return fmt.Errorf("user id's must match current user") - } - - return nil -} - -func (sr *SqliteRepository) AddResourceAssociation(ctx context.Context, source *models.SourceCredential, resourceType string, resourceId string, relatedSource *models.SourceCredential, relatedResourceType string, relatedResourceId string) error { - //ensure that the sources are "owned" by the same user - err := sr.verifyAssociationPermission(ctx, source.UserID, relatedSource.UserID) - if err != nil { - return err - } - - err = sr.GormClient.WithContext(ctx).Table("related_resources").Create(map[string]interface{}{ - "resource_base_user_id": source.UserID, - "resource_base_source_id": source.ID, - "resource_base_source_resource_type": resourceType, - "resource_base_source_resource_id": resourceId, - "related_resource_user_id": relatedSource.UserID, - "related_resource_source_id": relatedSource.ID, - "related_resource_source_resource_type": relatedResourceType, - "related_resource_source_resource_id": relatedResourceId, - }).Error - uniqueConstraintError := errors.New("constraint failed: UNIQUE constraint failed") - if err != nil { - if strings.HasPrefix(err.Error(), uniqueConstraintError.Error()) { - sr.Logger.Warnf("Ignoring an error when creating a related_resource association for %s/%s: %v", resourceType, resourceId, err) - //we can safely ignore this error - return nil - } - } - return err -} - -func (sr *SqliteRepository) RemoveResourceAssociation(ctx context.Context, source *models.SourceCredential, resourceType string, resourceId string, relatedSource *models.SourceCredential, relatedResourceType string, relatedResourceId string) error { - //ensure that the sources are "owned" by the same user - err := sr.verifyAssociationPermission(ctx, source.UserID, relatedSource.UserID) - if err != nil { - return err - } - - //manually delete association - results := sr.GormClient.WithContext(ctx). - //Table("related_resources"). 
- Delete(&models.RelatedResource{}, map[string]interface{}{ - "resource_base_user_id": source.UserID, - "resource_base_source_id": source.ID, - "resource_base_source_resource_type": resourceType, - "resource_base_source_resource_id": resourceId, - "related_resource_user_id": relatedSource.UserID, - "related_resource_source_id": relatedSource.ID, - "related_resource_source_resource_type": relatedResourceType, - "related_resource_source_resource_id": relatedResourceId, - }) - - if results.Error != nil { - return results.Error - } else if results.RowsAffected == 0 { - return fmt.Errorf("no association found for %s/%s and %s/%s", resourceType, resourceId, relatedResourceType, relatedResourceId) - } - return nil -} - -func (sr *SqliteRepository) FindResourceAssociationsByTypeAndId(ctx context.Context, source *models.SourceCredential, resourceType string, resourceId string) ([]models.RelatedResource, error) { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return nil, currentUserErr - } - - if source.UserID != currentUser.ID { - return nil, fmt.Errorf("source credential must match the current user id") - } - - // SELECT * FROM related_resources WHERE user_id = "53c1e930-63af-46c9-b760-8e83cbc1abd9"; - var relatedResources []models.RelatedResource - result := sr.GormClient.WithContext(ctx). - Where(models.RelatedResource{ - ResourceBaseUserID: currentUser.ID, - ResourceBaseSourceID: source.ID, - ResourceBaseSourceResourceType: resourceType, - ResourceBaseSourceResourceID: resourceId, - }). - Find(&relatedResources) - return relatedResources, result.Error -} - -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// Resource Composition (Grouping) -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - -// AddResourceComposition -// this will group resources together into a "Composition" -- primarily to group related Encounters & Conditions into one semantic root. -// algorithm: -// - find source for each resource -// - (SECURITY) ensure the current user and the source for each resource matches -// - check if there is a Composition resource Type already. -// - if Composition type already exists: -// - update "relatesTo" field with additional data. -// - else: -// - Create a Composition resource type (populated with "relatesTo" references to all provided Resources) -// -// - add AddResourceAssociation for all resources linked to the Composition resource -// - store the Composition resource -// TODO: determine if we should be using a List Resource instead of a Composition resource -func (sr *SqliteRepository) AddResourceComposition(ctx context.Context, compositionTitle string, resources []*models.ResourceBase) error { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return currentUserErr - } - - //generate placeholder source - placeholderSource := models.SourceCredential{UserID: currentUser.ID, SourceType: "manual", ModelBase: models.ModelBase{ID: uuid.MustParse("00000000-0000-0000-0000-000000000000")}} - - existingCompositionResources := []*models.ResourceBase{} - rawResourceLookupTable := map[string]*models.ResourceBase{} - - //find the source for each resource we'd like to merge. 
(for ownership verification) - sourceLookup := map[uuid.UUID]*models.SourceCredential{} - for _, resource := range resources { - if resource.SourceResourceType == pkg.FhirResourceTypeComposition { - //skip, Composition resources don't have a valid SourceCredential - existingCompositionResources = append(existingCompositionResources, resource) - - //compositions may include existing resources, make sure we handle these - for _, related := range resource.RelatedResource { - rawResourceLookupTable[fmt.Sprintf("%s/%s", related.SourceResourceType, related.SourceResourceID)] = related - } - continue - } - - if _, sourceOk := sourceLookup[resource.SourceID]; !sourceOk { - //source has not been added yet, lets query for it. - sourceCred, err := sr.GetSource(ctx, resource.SourceID.String()) - if err != nil { - return fmt.Errorf("could not find source %s", resource.SourceID.String()) - } - sourceLookup[resource.SourceID] = sourceCred - } - - rawResourceLookupTable[fmt.Sprintf("%s/%s", resource.SourceResourceType, resource.SourceResourceID)] = resource - } - - // SECURITY: ensure the current user and the source for each resource matches - for _, source := range sourceLookup { - if source.UserID != currentUser.ID { - return fmt.Errorf("source must be owned by the current user: %s vs %s", source.UserID, currentUser.ID) - } - } - - // - check if there is a Composition resource Type already. - var compositionResource *models.ResourceBase - - if len(existingCompositionResources) > 0 { - //- if Composition type already exists in this set - // - update "relatesTo" field with additional data. - compositionResource = existingCompositionResources[0] - - //disassociate all existing remaining composition resources. - for _, existingCompositionResource := range existingCompositionResources[1:] { - for _, relatedResource := range existingCompositionResource.RelatedResource { - if err := sr.RemoveResourceAssociation( - ctx, - &placeholderSource, - existingCompositionResource.SourceResourceType, - existingCompositionResource.SourceResourceID, - sourceLookup[relatedResource.SourceID], - relatedResource.SourceResourceType, - relatedResource.SourceResourceID, - ); err != nil { - //ignoring errors, could be due to duplicate edges - return fmt.Errorf("an error occurred while removing resource association: %v", err) - } - } - - //remove this resource - compositionTable, err := databaseModel.GetTableNameByResourceType("Composition") - if err != nil { - return fmt.Errorf("an error occurred while finding Composition resource table: %v", err) - } - //TODO: we may need to delete with using the FhirComposition struct type - deleteResult := sr.GormClient.WithContext(ctx). - Table(compositionTable). 
- Delete(existingCompositionResource) - if deleteResult.Error != nil { - return fmt.Errorf("an error occurred while removing Composition resource(%s/%s): %v", existingCompositionResource.SourceResourceType, existingCompositionResource.SourceID, err) - } else if deleteResult.RowsAffected != 1 { - return fmt.Errorf("composition resource was not deleted %s/%s", existingCompositionResource.SourceResourceType, existingCompositionResource.SourceID) - } - } - - } else { - //- else: - // - Create a Composition resource type (populated with "relatesTo" references to all provided Resources) - compositionResource = &models.ResourceBase{ - OriginBase: models.OriginBase{ - UserID: placeholderSource.UserID, // - SourceID: placeholderSource.ID, //Empty SourceID expected ("0000-0000-0000-0000") - SourceResourceType: pkg.FhirResourceTypeComposition, - SourceResourceID: uuid.New().String(), - }, - } - } - - // - Generate an "updated" RawResource json blob - rawCompositionResource := models.ResourceComposition{ - Title: compositionTitle, - RelatesTo: []models.ResourceCompositionRelatesTo{}, - } - - for relatedResourceKey, _ := range rawResourceLookupTable { - rawCompositionResource.RelatesTo = append(rawCompositionResource.RelatesTo, models.ResourceCompositionRelatesTo{ - Target: models.ResourceCompositionRelatesToTarget{ - TargetReference: models.ResourceCompositionRelatesToTargetReference{ - Reference: relatedResourceKey, - }, - }, - }) - } - - rawResourceJson, err := json.Marshal(rawCompositionResource) - if err != nil { - return err - } - compositionResource.ResourceRaw = rawResourceJson - - compositionResource.SortTitle = &compositionTitle - compositionResource.RelatedResource = utils.SortResourcePtrListByDate(resources) - compositionResource.SortDate = compositionResource.RelatedResource[0].SortDate - - //store the Composition resource - _, err = sr.UpsertResource(ctx, compositionResource) - if err != nil { - return err - } - - // - add AddResourceAssociation for all resources linked to the Composition resource - for _, resource := range rawResourceLookupTable { - if err := sr.AddResourceAssociation( - ctx, - &placeholderSource, - compositionResource.SourceResourceType, - compositionResource.SourceResourceID, - sourceLookup[resource.SourceID], - resource.SourceResourceType, - resource.SourceResourceID, - ); err != nil { - return err - } - } - - return nil -} - -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// SourceCredential -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - -func (sr *SqliteRepository) CreateSource(ctx context.Context, sourceCreds *models.SourceCredential) error { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return currentUserErr - } - sourceCreds.UserID = currentUser.ID - - //Assign will **always** update the source credential in the DB with data passed into this function. - return sr.GormClient.WithContext(ctx). - Where(models.SourceCredential{ - UserID: sourceCreds.UserID, - SourceType: sourceCreds.SourceType, - Patient: sourceCreds.Patient}). 
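// EDITOR'S NOTE (hedged sketch, not part of this diff): the composition logic
// above keys every grouped resource as "SourceResourceType/SourceResourceID"
// and reuses that exact string as the relatesTo target reference. For two
// hypothetical resources, the payload marshalled into ResourceRaw is built
// like this (title and ids are illustrative):
example := models.ResourceComposition{
	Title: "Broken Leg Visit",
	RelatesTo: []models.ResourceCompositionRelatesTo{
		{Target: models.ResourceCompositionRelatesToTarget{
			TargetReference: models.ResourceCompositionRelatesToTargetReference{Reference: "Encounter/enc-42"},
		}},
		{Target: models.ResourceCompositionRelatesToTarget{
			TargetReference: models.ResourceCompositionRelatesToTargetReference{Reference: "Condition/cond-7"},
		}},
	},
}
exampleJson, _ := json.Marshal(example) // ends up on the Composition's ResourceRaw field
_ = exampleJson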
- Assign(*sourceCreds).FirstOrCreate(sourceCreds).Error -} - -func (sr *SqliteRepository) UpdateSource(ctx context.Context, sourceCreds *models.SourceCredential) error { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return currentUserErr - } - sourceCreds.UserID = currentUser.ID - - //Assign will **always** update the source credential in the DB with data passed into this function. - return sr.GormClient.WithContext(ctx). - Where(models.SourceCredential{ - ModelBase: models.ModelBase{ID: sourceCreds.ID}, - UserID: sourceCreds.UserID, - SourceType: sourceCreds.SourceType, - }).Updates(models.SourceCredential{ - AccessToken: sourceCreds.AccessToken, - RefreshToken: sourceCreds.RefreshToken, - ExpiresAt: sourceCreds.ExpiresAt, - DynamicClientId: sourceCreds.DynamicClientId, - DynamicClientRegistrationMode: sourceCreds.DynamicClientRegistrationMode, - DynamicClientJWKS: sourceCreds.DynamicClientJWKS, - LatestBackgroundJobID: sourceCreds.LatestBackgroundJobID, - }).Error -} - -func (sr *SqliteRepository) GetSource(ctx context.Context, sourceId string) (*models.SourceCredential, error) { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return nil, currentUserErr - } - - sourceUUID, err := uuid.Parse(sourceId) - if err != nil { - return nil, err - } - - var sourceCred models.SourceCredential - results := sr.GormClient.WithContext(ctx). - Where(models.SourceCredential{UserID: currentUser.ID, ModelBase: models.ModelBase{ID: sourceUUID}}). - Preload("LatestBackgroundJob"). - First(&sourceCred) - - return &sourceCred, results.Error -} - -func (sr *SqliteRepository) GetSourceSummary(ctx context.Context, sourceId string) (*models.SourceSummary, error) { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return nil, currentUserErr - } - - sourceUUID, err := uuid.Parse(sourceId) - if err != nil { - return nil, err - } - - sourceSummary := &models.SourceSummary{} - - source, err := sr.GetSource(ctx, sourceId) - if err != nil { - return nil, err - } - sourceSummary.Source = source - - //group by resource type and return counts - // SELECT source_resource_type as resource_type, COUNT(*) as count FROM resource_bases WHERE source_id = "53c1e930-63af-46c9-b760-8e83cbc1abd9" GROUP BY source_resource_type; - - var resourceTypeCounts []map[string]interface{} - - resourceTypes := databaseModel.GetAllowedResourceTypes() - for _, resourceType := range resourceTypes { - tableName, err := databaseModel.GetTableNameByResourceType(resourceType) - if err != nil { - return nil, err - } - var count int64 - result := sr.GormClient.WithContext(ctx). - Table(tableName). - Where(models.OriginBase{ - UserID: currentUser.ID, - SourceID: sourceUUID, - }). - Count(&count) - if result.Error != nil { - return nil, result.Error - } - if count == 0 { - continue //don't add resource counts if the count is 0 - } - resourceTypeCounts = append(resourceTypeCounts, map[string]interface{}{ - "source_id": sourceId, - "resource_type": resourceType, - "count": count, - }) - } - - sourceSummary.ResourceTypeCounts = resourceTypeCounts - - //set patient - patientTableName, err := databaseModel.GetTableNameByResourceType("Patient") - if err != nil { - return nil, err - } - var wrappedPatientResourceModel models.ResourceBase - patientResults := sr.GormClient.WithContext(ctx). - Where(models.OriginBase{ - UserID: currentUser.ID, - SourceResourceType: "Patient", - SourceID: sourceUUID, - }). - Table(patientTableName). 
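// EDITOR'S NOTE (hedged sketch, not part of this diff): CreateSource above is
// effectively an upsert keyed on (user_id, source_type, patient):
// Where(...).Assign(x).FirstOrCreate(x) inserts a row when no match exists and
// otherwise overwrites the matched row with the assigned values, so calling it
// twice with refreshed credentials is safe. A caller-side sketch, with
// illustrative values and a hypothetical local interface:
type sourceCreator interface {
	CreateSource(ctx context.Context, sourceCreds *models.SourceCredential) error
}

func upsertSourceExample(ctx context.Context, repo sourceCreator) error {
	cred := &models.SourceCredential{
		SourceType:  "manual",
		Patient:     "patient-abc",
		AccessToken: "initial-token",
	}
	if err := repo.CreateSource(ctx, cred); err != nil { // first call inserts
		return err
	}
	cred.AccessToken = "rotated-token"
	return repo.CreateSource(ctx, cred) // second call updates the same row
}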
- First(&wrappedPatientResourceModel) - - if patientResults.Error != nil { - return nil, patientResults.Error - } - sourceSummary.Patient = &wrappedPatientResourceModel - - return sourceSummary, nil -} - -func (sr *SqliteRepository) GetSources(ctx context.Context) ([]models.SourceCredential, error) { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return nil, currentUserErr - } - - var sourceCreds []models.SourceCredential - results := sr.GormClient.WithContext(ctx). - Where(models.SourceCredential{UserID: currentUser.ID}). - Preload("LatestBackgroundJob"). - Find(&sourceCreds) - - return sourceCreds, results.Error -} - -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// Background Job -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - -func (sr *SqliteRepository) CreateBackgroundJob(ctx context.Context, backgroundJob *models.BackgroundJob) error { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return currentUserErr - } - - backgroundJob.UserID = currentUser.ID - - record := sr.GormClient.Create(backgroundJob) - return record.Error -} - -func (sr *SqliteRepository) GetBackgroundJob(ctx context.Context, backgroundJobId string) (*models.BackgroundJob, error) { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return nil, currentUserErr - } - - backgroundJobUUID, err := uuid.Parse(backgroundJobId) - if err != nil { - return nil, err - } - - var backgroundJob models.BackgroundJob - results := sr.GormClient.WithContext(ctx). - Where(models.SourceCredential{UserID: currentUser.ID, ModelBase: models.ModelBase{ID: backgroundJobUUID}}). - First(&backgroundJob) - - return &backgroundJob, results.Error -} - -func (sr *SqliteRepository) UpdateBackgroundJob(ctx context.Context, backgroundJob *models.BackgroundJob) error { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return currentUserErr - } - backgroundJob.UserID = currentUser.ID - - return sr.GormClient.WithContext(ctx). - Where(models.BackgroundJob{ - ModelBase: models.ModelBase{ID: backgroundJob.ID}, - UserID: backgroundJob.UserID, - }).Updates(models.BackgroundJob{ - JobStatus: backgroundJob.JobStatus, - Data: backgroundJob.Data, - LockedTime: backgroundJob.LockedTime, - DoneTime: backgroundJob.DoneTime, - Retries: backgroundJob.Retries, - Schedule: backgroundJob.Schedule, - }).Error -} - -func (sr *SqliteRepository) ListBackgroundJobs(ctx context.Context, queryOptions models.BackgroundJobQueryOptions) ([]models.BackgroundJob, error) { - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - return nil, currentUserErr - } - - queryParam := models.BackgroundJob{ - UserID: currentUser.ID, - } - - if queryOptions.JobType != nil { - queryParam.JobType = *queryOptions.JobType - } - if queryOptions.Status != nil { - queryParam.JobStatus = *queryOptions.Status - } - - var backgroundJobs []models.BackgroundJob - query := sr.GormClient.WithContext(ctx). - //Group("source_id"). //broken in Postgres. 
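// EDITOR'S NOTE (hedged, not part of this diff): GetBackgroundJob above passes
// a models.SourceCredential struct as the Where condition even though a
// background job is being fetched; GORM appears to build the condition from
// the struct's non-zero fields (id, user_id), which are also columns of the
// background_jobs table, so the lookup still resolves. Expressed with the
// model that is actually queried (same variables as in GetBackgroundJob), the
// condition would read:
var foundJob models.BackgroundJob
findResult := sr.GormClient.WithContext(ctx).
	Where(models.BackgroundJob{
		ModelBase: models.ModelBase{ID: backgroundJobUUID},
		UserID:    currentUser.ID,
	}).
	First(&foundJob)
_ = findResult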
- Where(queryParam).Limit(queryOptions.Limit).Order("locked_time DESC") - - if queryOptions.Offset > 0 { - query = query.Offset(queryOptions.Offset) - } - - return backgroundJobs, query.Find(&backgroundJobs).Error -} - -func (sr *SqliteRepository) BackgroundJobCheckpoint(ctx context.Context, checkpointData map[string]interface{}, errorData map[string]interface{}) { - sr.Logger.Info("begin checkpointing background job...") - if len(checkpointData) == 0 && len(errorData) == 0 { - sr.Logger.Info("no changes detected. Skipping checkpoint") - return //nothing to do - } - defer sr.Logger.Info("end checkpointing background job") - - currentUser, currentUserErr := sr.GetCurrentUser(ctx) - if currentUserErr != nil { - sr.Logger.Warning("could not find current user info context. Ignoring checkpoint", currentUserErr) - return - } - - //make sure we do an atomic update - backgroundJobId, ok := ctx.Value(pkg.ContextKeyTypeBackgroundJobID).(string) - if !ok { - sr.Logger.Warning("could not find background job id in context. Ignoring checkpoint") - return - } - backgroundJobUUID, err := uuid.Parse(backgroundJobId) - if err != nil { - sr.Logger.Warning("could not parse background job id. Ignoring checkpoint", err) - return - } - //https://gorm.io/docs/advanced_query.html#Locking-FOR-UPDATE - //TODO: if using another database type (not SQLITE) we need to make sure we use the correct locking strategy - //This is not a problem in SQLITE because it does database (or table) level locking by default - //var backgroundJob models.BackgroundJob - //sr.GormClient.Clauses(clause.Locking{Strength: "UPDATE"}).Find(&backgroundJob) - - txErr := sr.GormClient.Transaction(func(tx *gorm.DB) error { - //retrieve the background job by id - var backgroundJob models.BackgroundJob - backgroundJobFindResults := tx.WithContext(ctx). - Where(models.BackgroundJob{ - ModelBase: models.ModelBase{ID: backgroundJobUUID}, - UserID: currentUser.ID, - }). - First(&backgroundJob) - if backgroundJobFindResults.Error != nil { - return backgroundJobFindResults.Error - } - - //deserialize the job data - var backgroundJobSyncData models.BackgroundJobSyncData - if backgroundJob.Data != nil { - err := json.Unmarshal(backgroundJob.Data, &backgroundJobSyncData) - if err != nil { - return err - } - } - - //update the job data with new data provided by the calling functiion - changed := false - if len(checkpointData) > 0 { - backgroundJobSyncData.CheckpointData = checkpointData - changed = true - } - if len(errorData) > 0 { - backgroundJobSyncData.ErrorData = errorData - changed = true - } - - //define a background job with the fields we're going to update - now := time.Now() - updatedBackgroundJob := models.BackgroundJob{ - LockedTime: &now, - } - if changed { - serializedData, err := json.Marshal(backgroundJobSyncData) - if err != nil { - return err - } - updatedBackgroundJob.Data = serializedData - - } - - return tx.WithContext(ctx). - Where(models.BackgroundJob{ - ModelBase: models.ModelBase{ID: backgroundJobUUID}, - UserID: currentUser.ID, - }).Updates(updatedBackgroundJob).Error - }) - - if txErr != nil { - sr.Logger.Warning("could not find or update background job. Ignoring checkpoint", txErr) - } - -} - -// when server restarts, we should unlock all locked jobs, and set their status to failed -// SECURITY: this is global, and effects all users. -func (sr *SqliteRepository) CancelAllLockedBackgroundJobsAndFail() error { - now := time.Now() - return sr.GormClient. - Where(models.BackgroundJob{JobStatus: pkg.BackgroundJobStatusLocked}). 
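// EDITOR'S NOTE (hedged sketch, not part of this diff): the TODO in
// BackgroundJobCheckpoint above relies on SQLite's database-level locking to
// keep the read-modify-write of the job data atomic. On a server database such
// as Postgres, the row would need an explicit lock inside the transaction,
// e.g. GORM's SELECT ... FOR UPDATE clause (requires importing
// "gorm.io/gorm/clause"):
lockedFind := tx.WithContext(ctx).
	Clauses(clause.Locking{Strength: "UPDATE"}). // row lock held until the tx ends
	Where(models.BackgroundJob{
		ModelBase: models.ModelBase{ID: backgroundJobUUID},
		UserID:    currentUser.ID,
	}).
	First(&backgroundJob)
_ = lockedFind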
- Updates(models.BackgroundJob{ - JobStatus: pkg.BackgroundJobStatusFailed, - DoneTime: &now, - }).Error - -} - -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// Utilities -//////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - -func sqlitePragmaString(pragmas map[string]string) string { - q := url.Values{} - for key, val := range pragmas { - q.Add("_pragma", fmt.Sprintf("%s=%s", key, val)) - } - - queryStr := q.Encode() - if len(queryStr) > 0 { - return "?" + queryStr - } - return "" -} - -// Internal function -// This function will return a list of resources from all FHIR tables in the database -// The query allows us to set the source id, source resource id, source resource type -// SECURITY: this function assumes the user has already been authenticated -// TODO: theres probably a more efficient way of doing this with GORM -func (sr *SqliteRepository) getResourcesFromAllTables(queryBuilder *gorm.DB, queryParam models.OriginBase) ([]models.ResourceBase, error) { - wrappedResourceModels := []models.ResourceBase{} - resourceTypes := databaseModel.GetAllowedResourceTypes() - for _, resourceType := range resourceTypes { - tableName, err := databaseModel.GetTableNameByResourceType(resourceType) - if err != nil { - return nil, err - } - var tempWrappedResourceModels []models.ResourceBase - results := queryBuilder. - Where(queryParam). - Table(tableName). - Find(&tempWrappedResourceModels) - if results.Error != nil { - return nil, results.Error - } - wrappedResourceModels = append(wrappedResourceModels, tempWrappedResourceModels...) - } - return wrappedResourceModels, nil -}
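// EDITOR'S NOTE (hedged example, not part of this diff): sqlitePragmaString
// above only assembles a DSN query-string suffix. url.Values percent-encodes
// the "=" inside each value, and because the pragmas come from a map the order
// of the _pragma entries is unspecified. A small worked example, with
// illustrative pragma names and values:
dsnSuffix := sqlitePragmaString(map[string]string{
	"busy_timeout": "5000",
	"journal_mode": "WAL",
})
// dsnSuffix == "?_pragma=busy_timeout%3D5000&_pragma=journal_mode%3DWAL"
// (or with the two _pragma entries swapped), ready to append to the sqlite DSN.
fmt.Println(dsnSuffix)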