From 761d9f7e01574c239973b4d217543167bcc468fe Mon Sep 17 00:00:00 2001 From: Peter Hoffmann <954078+p-hoffmann@users.noreply.github.com> Date: Sat, 3 Jan 2026 07:10:39 +0800 Subject: [PATCH 01/11] cleanup --- README.md | 20 ++++++ pom.xml | 62 ++++------------- .../org/ohdsi/webapi/DataAccessConfig.java | 17 +++-- .../org/ohdsi/webapi/cache/CacheService.java | 1 - .../org/ohdsi/webapi/i18n/I18nController.java | 1 - .../org/ohdsi/webapi/info/InfoService.java | 1 - .../webapi/job/NotificationController.java | 1 - .../webapi/reusable/ReusableController.java | 2 - .../webapi/security/PermissionController.java | 1 - .../service/CohortDefinitionService.java | 2 - .../webapi/service/ConceptSetService.java | 4 +- .../webapi/service/FeasibilityService.java | 2 - .../ohdsi/webapi/source/SourceController.java | 1 - .../org/ohdsi/webapi/tag/TagController.java | 2 - .../org/ohdsi/webapi/tool/ToolController.java | 3 - .../importer/UserImportJobController.java | 2 - .../resources/application-broadsea.properties | 66 ++++++++++++------- src/main/resources/application-dev.properties | 11 ++-- src/main/resources/application.properties | 15 +++-- .../1.0.0.1__schema-drop_spring_batch.sql | 10 --- .../rollback/1.0.0.2__schema-drop_jpa.sql | 3 - .../rollback/1.0.0.9__schema-drop_shiro.sql | 11 ---- .../webapi/test/CDMResultsServiceIT.java | 20 +++--- .../webapi/test/CohortAnalysisServiceIT.java | 4 +- .../webapi/test/VocabularyServiceIT.java | 4 +- .../java/org/ohdsi/webapi/test/WebApiIT.java | 35 ++++++++-- .../resources/application-test.properties | 20 +++--- 27 files changed, 158 insertions(+), 163 deletions(-) delete mode 100644 src/main/resources/db/migration/postgresql/rollback/1.0.0.1__schema-drop_spring_batch.sql delete mode 100644 src/main/resources/db/migration/postgresql/rollback/1.0.0.2__schema-drop_jpa.sql delete mode 100644 src/main/resources/db/migration/postgresql/rollback/1.0.0.9__schema-drop_shiro.sql diff --git a/README.md b/README.md index 84e6e2fd70..e617554ffa 100644 --- a/README.md +++ b/README.md @@ -36,6 +36,26 @@ mvn clean package -DskipTests -Dpackaging.type=jar java -jar target/WebAPI.jar --spring.profiles.active=webapi-postgresql ``` +## Database configuration (single source of truth) + +Set your datasource and schema once; the packaged properties reuse the shared schema key. + +Minimal local run example (PostgreSQL): + +```bash +export WEBAPI_SCHEMA=webapi # optional; defaults to webapi +export SPRING_DATASOURCE_URL=jdbc:postgresql://localhost:5432/postgres +export SPRING_DATASOURCE_USERNAME=postgres +export SPRING_DATASOURCE_PASSWORD=your_password + +java -jar target/WebAPI.jar \ + --spring.profiles.active=webapi-postgresql \ + --datasource.ohdsi.schema=${WEBAPI_SCHEMA:-webapi} +``` + +Notes: +- Batch uses a table prefix and the security datasource can be overridden if you choose a separate connection, but both are optional when you keep everything on the main datasource/schema. 
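For reference, here is a minimal sketch of those two optional overrides, using the key names that appear in `application-broadsea.properties` later in this patch; the host, credentials, and schema values below are placeholders, not shipped defaults:

```properties
# Only needed when batch tables or the security database are NOT on the main datasource/schema
spring.batch.repository.tableprefix=webapi.BATCH_

security.db.datasource.driverClassName=org.postgresql.Driver
security.db.datasource.url=jdbc:postgresql://security-db-host:5432/postgres
security.db.datasource.username=webapi_security_user
security.db.datasource.password=change_me
security.db.datasource.schema=webapi_security
```

Leave both out to keep everything on the main datasource, as described above.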
+ ## SAML Auth support The following parameters are used: diff --git a/pom.xml b/pom.xml index 3bf51d8690..b78eceb49d 100644 --- a/pom.xml +++ b/pom.xml @@ -14,7 +14,6 @@ UTF-8 3.5.6 - 2.24.3 2.2.1 5.5.0 @@ -40,22 +39,21 @@ 21 21 - - - com.microsoft.sqlserver.jdbc.SQLServerDriver - jdbc:sqlserver://serverName;databaseName=databaseName - user - password + + org.postgresql.Driver + jdbc:postgresql://localhost:5433/postgres?currentSchema=webapi + postgres + mypass - sql server - dbo - sql server + postgresql + webapi + postgresql - com.microsoft.sqlserver.jdbc.SQLServerDriver - jdbc:sqlserver://serverName - userWithWritePrivs - password - classpath:db/migration/sqlserver + org.postgresql.Driver + ${datasource.url} + ${datasource.username} + ${datasource.password} + classpath:db/migration/postgresql ${datasource.ohdsi.schema} false @@ -466,9 +464,6 @@ --add-opens java.naming/com.sun.jndi.ldap=ALL-UNNAMED --add-exports java.naming/com.sun.jndi.ldap=ALL-UNNAMED - - org.springframework.boot.logging.log4j2.Log4J2LoggingSystem - @@ -491,9 +486,6 @@ 3.5.2 ${skipITtests} - - org.springframework.boot.logging.log4j2.Log4J2LoggingSystem - @@ -591,26 +583,6 @@ - - org.apache.logging.log4j - log4j-api - ${log4j2.version} - - - org.apache.logging.log4j - log4j-core - ${log4j2.version} - - - org.apache.logging.log4j - log4j-web - ${log4j2.version} - - - org.apache.logging.log4j - log4j-slf4j-impl - ${log4j2.version} - org.springframework.boot @@ -661,12 +633,6 @@ org.springframework.boot spring-boot-starter - - - org.springframework.boot - spring-boot-starter-logging - - @@ -691,7 +657,7 @@ org.springframework.boot - spring-boot-starter-log4j2 + spring-boot-starter-logging org.springframework.boot diff --git a/src/main/java/org/ohdsi/webapi/DataAccessConfig.java b/src/main/java/org/ohdsi/webapi/DataAccessConfig.java index 075a85b086..fa585bf6b4 100644 --- a/src/main/java/org/ohdsi/webapi/DataAccessConfig.java +++ b/src/main/java/org/ohdsi/webapi/DataAccessConfig.java @@ -49,14 +49,21 @@ public class DataAccessConfig { private Properties getJPAProperties() { Properties properties = new Properties(); - properties.setProperty("hibernate.default_schema", this.env.getProperty("spring.jpa.properties.hibernate.default_schema")); - properties.setProperty("hibernate.dialect", this.env.getProperty("spring.jpa.properties.hibernate.dialect")); - properties.setProperty("hibernate.generate_statistics", this.env.getProperty("spring.jpa.properties.hibernate.generate_statistics")); - properties.setProperty("hibernate.jdbc.batch_size", this.env.getProperty("spring.jpa.properties.hibernate.jdbc.batch_size")); - properties.setProperty("hibernate.order_inserts", this.env.getProperty("spring.jpa.properties.hibernate.order_inserts")); + // Only set optional Hibernate properties when present to avoid null values + putIfPresent(properties, "hibernate.default_schema", this.env.getProperty("spring.jpa.properties.hibernate.default_schema")); + putIfPresent(properties, "hibernate.dialect", this.env.getProperty("spring.jpa.properties.hibernate.dialect")); + putIfPresent(properties, "hibernate.generate_statistics", this.env.getProperty("spring.jpa.properties.hibernate.generate_statistics")); + putIfPresent(properties, "hibernate.jdbc.batch_size", this.env.getProperty("spring.jpa.properties.hibernate.jdbc.batch_size")); + putIfPresent(properties, "hibernate.order_inserts", this.env.getProperty("spring.jpa.properties.hibernate.order_inserts")); properties.setProperty("hibernate.id.new_generator_mappings", "true"); return properties; } + + 
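    /**
     * Copies a Hibernate setting into the JPA properties only when the
     * corresponding Spring property is actually defined, so optional keys
     * (dialect, batch size, statistics, ...) can be left out of a profile
     * without Properties.setProperty failing on a null value.
     */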
private static void putIfPresent(Properties target, String key, String value) { + if (value != null) { + target.setProperty(key, value); + } + } @Bean({"primaryDataSource", "dataSource"}) @DependsOn("defaultStringEncryptor") diff --git a/src/main/java/org/ohdsi/webapi/cache/CacheService.java b/src/main/java/org/ohdsi/webapi/cache/CacheService.java index e9fa67efd8..0c9e27cccd 100644 --- a/src/main/java/org/ohdsi/webapi/cache/CacheService.java +++ b/src/main/java/org/ohdsi/webapi/cache/CacheService.java @@ -58,7 +58,6 @@ public CacheService() { @GET - @Path("/") @Produces(MediaType.APPLICATION_JSON) public List getCacheInfoList() { List caches = new ArrayList<>(); diff --git a/src/main/java/org/ohdsi/webapi/i18n/I18nController.java b/src/main/java/org/ohdsi/webapi/i18n/I18nController.java index 18f27f4fd5..de95fbdf2a 100644 --- a/src/main/java/org/ohdsi/webapi/i18n/I18nController.java +++ b/src/main/java/org/ohdsi/webapi/i18n/I18nController.java @@ -30,7 +30,6 @@ public class I18nController { private I18nService i18nService; @GET - @Path("/") @Produces(MediaType.APPLICATION_JSON) public Response getResources(@Context ContainerRequestContext requestContext) { diff --git a/src/main/java/org/ohdsi/webapi/info/InfoService.java b/src/main/java/org/ohdsi/webapi/info/InfoService.java index e822974beb..ca00fa67a6 100644 --- a/src/main/java/org/ohdsi/webapi/info/InfoService.java +++ b/src/main/java/org/ohdsi/webapi/info/InfoService.java @@ -52,7 +52,6 @@ public InfoService(BuildProperties buildProperties, BuildInfo buildInfo, List list( diff --git a/src/main/java/org/ohdsi/webapi/reusable/ReusableController.java b/src/main/java/org/ohdsi/webapi/reusable/ReusableController.java index c10a4fe0cf..80d1bcf76c 100644 --- a/src/main/java/org/ohdsi/webapi/reusable/ReusableController.java +++ b/src/main/java/org/ohdsi/webapi/reusable/ReusableController.java @@ -36,7 +36,6 @@ public ReusableController(ReusableService reusableService) { } @POST - @Path("/") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) public ReusableDTO create(final ReusableDTO dto) { @@ -44,7 +43,6 @@ public ReusableDTO create(final ReusableDTO dto) { } @GET - @Path("/") @Produces(MediaType.APPLICATION_JSON) public Page page(@Pagination Pageable pageable) { return reusableService.page(pageable); diff --git a/src/main/java/org/ohdsi/webapi/security/PermissionController.java b/src/main/java/org/ohdsi/webapi/security/PermissionController.java index 5ab0df974e..1af1059d0f 100644 --- a/src/main/java/org/ohdsi/webapi/security/PermissionController.java +++ b/src/main/java/org/ohdsi/webapi/security/PermissionController.java @@ -56,7 +56,6 @@ public PermissionController(PermissionService permissionService, PermissionManag * @return A list of permissions */ @GET - @Path("") @Produces(MediaType.APPLICATION_JSON) public List getPermissions() { diff --git a/src/main/java/org/ohdsi/webapi/service/CohortDefinitionService.java b/src/main/java/org/ohdsi/webapi/service/CohortDefinitionService.java index 166ecf3992..f8823b0a37 100644 --- a/src/main/java/org/ohdsi/webapi/service/CohortDefinitionService.java +++ b/src/main/java/org/ohdsi/webapi/service/CohortDefinitionService.java @@ -447,7 +447,6 @@ public GenerateSqlResult generateSql(GenerateSqlRequest request) { * @see org.ohdsi.webapi.cohortdefinition.CohortMetadataDTO */ @GET - @Path("/") @Produces(MediaType.APPLICATION_JSON) @Transactional @Cacheable(cacheNames = CachingSetup.COHORT_DEFINITION_LIST_CACHE, key = "@permissionService.getSubjectCacheKey()") @@ -475,7 +474,6 @@ 
public List getCohortDefinitionList() { * @return The newly created cohort definition */ @POST - @Path("/") @Transactional @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) diff --git a/src/main/java/org/ohdsi/webapi/service/ConceptSetService.java b/src/main/java/org/ohdsi/webapi/service/ConceptSetService.java index eb03acc648..91d60408af 100644 --- a/src/main/java/org/ohdsi/webapi/service/ConceptSetService.java +++ b/src/main/java/org/ohdsi/webapi/service/ConceptSetService.java @@ -173,8 +173,7 @@ public ConceptSetDTO getConceptSet(@PathParam("id") final int id) { * @summary Get all concept sets * @return A list of all concept sets in the WebAPI database */ - @GET - @Path("/") + @GET @Produces(MediaType.APPLICATION_JSON) @Cacheable(cacheNames = ConceptSetService.CachingSetup.CONCEPT_SET_LIST_CACHE, key = "@permissionService.getSubjectCacheKey()") public Collection getConceptSets() { @@ -491,7 +490,6 @@ public Response exportConceptSetToCSV(@PathParam("id") final String id) throws E * @param conceptSetDTO The concept set to save * @return The concept set saved with the concept set identifier */ - @Path("/") @POST @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) diff --git a/src/main/java/org/ohdsi/webapi/service/FeasibilityService.java b/src/main/java/org/ohdsi/webapi/service/FeasibilityService.java index f22d08d3ca..9235f00930 100644 --- a/src/main/java/org/ohdsi/webapi/service/FeasibilityService.java +++ b/src/main/java/org/ohdsi/webapi/service/FeasibilityService.java @@ -385,7 +385,6 @@ public FeasibilityStudyDTO feasibilityStudyToDTO(FeasibilityStudy study) { * @return List */ @GET - @Path("/") @Produces(MediaType.APPLICATION_JSON) public List getFeasibilityStudyList() { @@ -416,7 +415,6 @@ public List getFeasibilityStudyList * @return Feasibility study */ @PUT - @Path("/") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) @Transactional diff --git a/src/main/java/org/ohdsi/webapi/source/SourceController.java b/src/main/java/org/ohdsi/webapi/source/SourceController.java index 5617382747..27466d7d5c 100644 --- a/src/main/java/org/ohdsi/webapi/source/SourceController.java +++ b/src/main/java/org/ohdsi/webapi/source/SourceController.java @@ -153,7 +153,6 @@ public SourceDetails getSourceDetails(@PathParam("sourceId") Integer sourceId) { * @return a new SourceInfo for the created source * @throws Exception */ - @Path("") @POST @Consumes(MediaType.MULTIPART_FORM_DATA) @Produces(MediaType.APPLICATION_JSON) diff --git a/src/main/java/org/ohdsi/webapi/tag/TagController.java b/src/main/java/org/ohdsi/webapi/tag/TagController.java index 9d4f8c3e84..53dd0ac497 100644 --- a/src/main/java/org/ohdsi/webapi/tag/TagController.java +++ b/src/main/java/org/ohdsi/webapi/tag/TagController.java @@ -40,7 +40,6 @@ public TagController(TagService pathwayService, * @return */ @POST - @Path("/") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) public TagDTO create(final TagDTO dto) { @@ -70,7 +69,6 @@ public List search(@QueryParam("namePart") String namePart) { * @return */ @GET - @Path("/") @Produces(MediaType.APPLICATION_JSON) public List list() { return tagService.listInfoDTO(); diff --git a/src/main/java/org/ohdsi/webapi/tool/ToolController.java b/src/main/java/org/ohdsi/webapi/tool/ToolController.java index 34b2449b0c..774d8b8b80 100644 --- a/src/main/java/org/ohdsi/webapi/tool/ToolController.java +++ b/src/main/java/org/ohdsi/webapi/tool/ToolController.java @@ -24,7 +24,6 @@ public 
ToolController(ToolServiceImpl service) { } @GET - @Path("") @Produces(MediaType.APPLICATION_JSON) public List getTools() { return service.getTools(); @@ -38,7 +37,6 @@ public ToolDTO getToolById(@PathParam("id") Integer id) { } @POST - @Path("") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) public ToolDTO createTool(ToolDTO dto) { @@ -53,7 +51,6 @@ public void delete(@PathParam("id") Integer id) { } @PUT - @Path("") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) public ToolDTO updateTool(ToolDTO toolDTO) { diff --git a/src/main/java/org/ohdsi/webapi/user/importer/UserImportJobController.java b/src/main/java/org/ohdsi/webapi/user/importer/UserImportJobController.java index 4c6c02f2a5..cec7d74d4b 100644 --- a/src/main/java/org/ohdsi/webapi/user/importer/UserImportJobController.java +++ b/src/main/java/org/ohdsi/webapi/user/importer/UserImportJobController.java @@ -55,7 +55,6 @@ public UserImportJobController(UserImportJobService jobService, @Qualifier("conv * @return The job information */ @POST - @Path("/") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public UserImportJobDTO createJob(UserImportJobDTO jobDTO) { @@ -100,7 +99,6 @@ public UserImportJobDTO updateJob(@PathParam("id") Long jobId, UserImportJobDTO * @return The list of user import jobs */ @GET - @Path("/") @Produces(MediaType.APPLICATION_JSON) @Transactional public List listJobs() { diff --git a/src/main/resources/application-broadsea.properties b/src/main/resources/application-broadsea.properties index 3b6596e4b6..c3c66d0d7f 100644 --- a/src/main/resources/application-broadsea.properties +++ b/src/main/resources/application-broadsea.properties @@ -1,34 +1,44 @@ -# Broadsea PostgreSQL Configuration -datasource.driverClassName=org.postgresql.Driver -datasource.url=jdbc:postgresql://localhost:5432/postgres?currentSchema=webapi -datasource.username=postgres -datasource.password=mypass -datasource.dialect=postgresql -datasource.ohdsi.schema=webapi -datasource.dialect.source=postgresql - -# Flyway Configuration -flyway.datasource.driverClassName=org.postgresql.Driver -flyway.datasource.url=jdbc:postgresql://localhost:5432/postgres?currentSchema=webapi -flyway.datasource.username=postgres -flyway.datasource.password=mypass -flyway.schemas=webapi -flyway.placeholders.ohdsiSchema=webapi -flyway.locations=classpath:db/migration/postgresql -flyway.validateOnMigrate=false +# Broadsea PostgreSQL Configuration (define once, reused below) +broadsea.db.driver=org.postgresql.Driver +broadsea.db.url=jdbc:postgresql://localhost:5433/postgres?currentSchema=webapi_test +broadsea.db.username=postgres +broadsea.db.password=mypass +broadsea.db.schema=webapi_test +broadsea.db.dialect=postgresql + +datasource.driverClassName=${broadsea.db.driver} +datasource.url=${broadsea.db.url} +datasource.username=${broadsea.db.username} +datasource.password=${broadsea.db.password} +datasource.dialect=${broadsea.db.dialect} +datasource.ohdsi.schema=${broadsea.db.schema} +datasource.dialect.source=${broadsea.db.dialect} + +# Flyway Configuration (reuse primary datasource) - keep literal values to avoid SQL Server defaults +spring.flyway.enabled=true +spring.flyway.driver-class-name=org.postgresql.Driver +spring.flyway.url=${broadsea.db.url} +spring.flyway.user=${broadsea.db.username} +spring.flyway.password=${broadsea.db.password} +spring.flyway.locations=classpath:db/migration/postgresql +spring.flyway.schemas=${broadsea.db.schema} 
+spring.flyway.placeholders.ohdsiSchema=${broadsea.db.schema} +spring.flyway.validate-on-migrate=false +spring.flyway.baseline-on-migrate=true +spring.flyway.baseline-version=0 +spring.flyway.out-of-order=true # Hibernate Configuration -hibernate.dialect=org.hibernate.dialect.PostgreSQLDialect # JPA Configuration spring.jpa.show-sql=false -spring.jpa.properties.hibernate.default_schema=webapi +spring.jpa.properties.hibernate.default_schema=${broadsea.db.schema} spring.jpa.properties.hibernate.generate_statistics=false spring.jpa.properties.hibernate.jdbc.batch_size=20 spring.jpa.properties.hibernate.order_inserts=true # Spring Batch Configuration -spring.batch.repository.tableprefix=webapi.BATCH_ +spring.batch.repository.tableprefix=${broadsea.db.schema}.BATCH_ spring.batch.repository.isolationLevelForCreate=ISOLATION_DEFAULT spring.batch.taskExecutor.corePoolSize=5 spring.batch.taskExecutor.maxPoolSize=10 @@ -37,7 +47,7 @@ spring.batch.taskExecutor.threadGroupName=batchGroup spring.batch.taskExecutor.threadNamePrefix=batchExec # Server Configuration -server.port=8080 +server.port=8081 # Security Configuration security.provider=DisabledSecurity @@ -47,6 +57,13 @@ security.token.expiration=43200 security.defaultGlobalReadPermissions=true security.ssl.enabled=false +# Security data source defaults to Postgres (matches primary) +security.db.datasource.driverClassName=${broadsea.db.driver} +security.db.datasource.url=${broadsea.db.url} +security.db.datasource.username=${broadsea.db.username} +security.db.datasource.password=${broadsea.db.password} +security.db.datasource.schema=${broadsea.db.schema} + # Hikari Configuration spring.datasource.hikari.connection-test-query=SELECT 1 spring.datasource.hikari.connection-test-query-timeout=1000 @@ -66,7 +83,10 @@ logging.level.org.ohdsi=INFO logging.level.org.springframework.jdbc=DEBUG # Spring Cache -spring.cache.type=none +spring.cache.type=simple + +# Disable LDAP auto-config for JDK module restrictions in this env +spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.ldap.LdapAutoConfiguration,org.springframework.boot.autoconfigure.ldap.embedded.EmbeddedLdapAutoConfiguration # Person Settings person.viewDates=false diff --git a/src/main/resources/application-dev.properties b/src/main/resources/application-dev.properties index 90730ac51c..8745c1e335 100644 --- a/src/main/resources/application-dev.properties +++ b/src/main/resources/application-dev.properties @@ -10,11 +10,11 @@ datasource.dialect.source=postgresql # Flyway Configuration (Spring Boot 3.x properties) spring.flyway.driver-class-name=org.postgresql.Driver -spring.flyway.url=jdbc:postgresql://localhost:5432/postgres?currentSchema=webapi_dev -spring.flyway.user=postgres -spring.flyway.password=mypass -spring.flyway.schemas=webapi_dev -spring.flyway.placeholders.ohdsiSchema=webapi_dev +spring.flyway.url=${datasource.url} +spring.flyway.user=${datasource.username} +spring.flyway.password=${datasource.password} +spring.flyway.schemas=${datasource.ohdsi.schema} +spring.flyway.placeholders.ohdsiSchema=${datasource.ohdsi.schema} spring.flyway.locations=classpath:db/migration/postgresql spring.flyway.validate-on-migrate=false spring.flyway.baseline-on-migrate=true @@ -28,7 +28,6 @@ secondary.datasource.username=postgres secondary.datasource.password=mypass # Hibernate Configuration -hibernate.dialect=org.hibernate.dialect.PostgreSQLDialect # JPA Configuration spring.jpa.show-sql=false diff --git a/src/main/resources/application.properties 
b/src/main/resources/application.properties index 2a2537d2d8..93f9162695 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -34,9 +34,9 @@ r.serviceHost=${r.serviceHost} #DataSource for Change Managment / Migration spring.flyway.enabled=true spring.flyway.driver-class-name=${datasource.driverClassName} -spring.flyway.url=${flyway.datasource.url} -spring.flyway.user=${flyway.datasource.username} -spring.flyway.password=${flyway.datasource.password} +spring.flyway.url=${datasource.url} +spring.flyway.user=${datasource.username} +spring.flyway.password=${datasource.password} # CRITICAL: Maintain backward compatibility with Flyway 4.x schema history table name spring.flyway.table=schema_version # check that migration scripts location exists @@ -44,7 +44,7 @@ spring.flyway.fail-on-missing-locations=true spring.flyway.locations=${flyway.locations} # locations of migrations scripts # schemas to manage/update (e.g. ohdsi/results schema) -NOTE: CASE SENSITIVE! -spring.flyway.schemas=${flyway.schemas} +spring.flyway.schemas=${datasource.ohdsi.schema} #Baseline - start flyway managment with existing objects spring.flyway.baseline-on-migrate=true #Due to issue https://github.com/flyway/flyway/issues/752 use default baselineVersion=1 (Note equality to 1.0.0.0, so scripts with that version will be omitted) @@ -53,7 +53,7 @@ spring.flyway.validate-on-migrate=${flyway.validateOnMigrate} # Enable out of order migrations due to distributed development nature of WebAPI spring.flyway.out-of-order=true # Flyway Placeholders: -spring.flyway.placeholders.ohdsiSchema=${flyway.placeholders.ohdsiSchema} +spring.flyway.placeholders.ohdsiSchema=${datasource.ohdsi.schema} #Disable any auto init #http://docs.spring.io/spring-boot/docs/current/reference/html/howto-database-initialization.html @@ -62,12 +62,13 @@ spring.datasource.initialize=false spring.jpa.show-sql=${spring.jpa.show-sql} # JPA Default Schema spring.jpa.properties.hibernate.default_schema=${datasource.ohdsi.schema} -#JPA Dialect -spring.jpa.properties.hibernate.dialect=${hibernate.dialect} spring.jpa.properties.hibernate.generate_statistics=${spring.jpa.properties.hibernate.generate_statistics} spring.jpa.properties.hibernate.jdbc.batch_size=${spring.jpa.properties.hibernate.jdbc.batch_size} spring.jpa.properties.hibernate.order_inserts=${spring.jpa.properties.hibernate.order_inserts} +#Jersey WADL disabled to silence missing JAXBContext warning +jersey.config.server.wadl.disableWadl=true + #Spring Cache spring.cache.jcache.config=classpath:appCache.xml spring.cache.type=${spring.cache.type} diff --git a/src/main/resources/db/migration/postgresql/rollback/1.0.0.1__schema-drop_spring_batch.sql b/src/main/resources/db/migration/postgresql/rollback/1.0.0.1__schema-drop_spring_batch.sql deleted file mode 100644 index b0410dfb64..0000000000 --- a/src/main/resources/db/migration/postgresql/rollback/1.0.0.1__schema-drop_spring_batch.sql +++ /dev/null @@ -1,10 +0,0 @@ -DROP TABLE IF EXISTS BATCH_STEP_EXECUTION_CONTEXT; -DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_CONTEXT; -DROP TABLE IF EXISTS BATCH_STEP_EXECUTION; -DROP TABLE IF EXISTS BATCH_JOB_EXECUTION_PARAMS; -DROP TABLE IF EXISTS BATCH_JOB_EXECUTION; -DROP TABLE IF EXISTS BATCH_JOB_INSTANCE; - -DROP SEQUENCE IF EXISTS BATCH_STEP_EXECUTION_SEQ ; -DROP SEQUENCE IF EXISTS BATCH_JOB_EXECUTION_SEQ ; -DROP SEQUENCE IF EXISTS BATCH_JOB_SEQ ; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/rollback/1.0.0.2__schema-drop_jpa.sql 
b/src/main/resources/db/migration/postgresql/rollback/1.0.0.2__schema-drop_jpa.sql deleted file mode 100644 index bc4189800c..0000000000 --- a/src/main/resources/db/migration/postgresql/rollback/1.0.0.2__schema-drop_jpa.sql +++ /dev/null @@ -1,3 +0,0 @@ -DROP TABLE IF EXISTS EXAMPLEAPP_WIDGET; - -DROP SEQUENCE IF EXISTS HIBERNATE_SEQUENCE ; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/rollback/1.0.0.9__schema-drop_shiro.sql b/src/main/resources/db/migration/postgresql/rollback/1.0.0.9__schema-drop_shiro.sql deleted file mode 100644 index 34a85b13ad..0000000000 --- a/src/main/resources/db/migration/postgresql/rollback/1.0.0.9__schema-drop_shiro.sql +++ /dev/null @@ -1,11 +0,0 @@ -DROP TABLE SEC_ROLE_PERMISSION; -DROP TABLE SEC_USER_ROLE; -DROP TABLE SEC_PERMISSION; -DROP TABLE SEC_ROLE; -DROP TABLE SEC_USER; - -DROP SEQUENCE SEC_ROLE_PERMISSION_SEQUENCE; -DROP SEQUENCE SEC_USER_ROLE_SEQUENCE; -DROP SEQUENCE SEC_PERMISSION_SEQUENCE; -DROP SEQUENCE SEC_ROLE_SEQUENCE; -DROP SEQUENCE SEC_USER_SEQUENCE; \ No newline at end of file diff --git a/src/test/java/org/ohdsi/webapi/test/CDMResultsServiceIT.java b/src/test/java/org/ohdsi/webapi/test/CDMResultsServiceIT.java index bf3b63655f..9e1be20ac2 100644 --- a/src/test/java/org/ohdsi/webapi/test/CDMResultsServiceIT.java +++ b/src/test/java/org/ohdsi/webapi/test/CDMResultsServiceIT.java @@ -44,8 +44,8 @@ public class CDMResultsServiceIT extends WebApiIT { @Before public void init() throws Exception { - truncateTable(String.format("%s.%s", "public", "source")); - resetSequence(String.format("%s.%s", "public", "source_sequence")); + truncateTable(String.format("%s.%s", getOhdsiSchema(), "source")); + resetSequence(String.format("%s.%s", getOhdsiSchema(), "source_sequence")); sourceRepository.saveAndFlush(getCdmSource()); prepareCdmSchema(); prepareResultSchema(); @@ -99,7 +99,7 @@ public void achillesService_clearCache_nothingInCache_doesNothing() { achillesService.clearCache(); // Assert - String sql = "SELECT COUNT(*) FROM achilles_cache"; + String sql = String.format("SELECT COUNT(*) FROM %s", qualifyOhdsiTable("achilles_cache")); Integer count = jdbcTemplate.queryForObject(sql, Integer.class); assertEquals(0, count.intValue()); } @@ -108,16 +108,16 @@ public void achillesService_clearCache_nothingInCache_doesNothing() { public void achillesService_clearCache_somethingInCache_clearsAllRowsForSource() { // Arrange - String insertSqlRow1 = "INSERT INTO achilles_cache (id, source_id, cache_name, cache) VALUES (1, 1, 'cache1', 'cache1')"; + String insertSqlRow1 = String.format("INSERT INTO %s (id, source_id, cache_name, cache) VALUES (1, 1, 'cache1', 'cache1')", qualifyOhdsiTable("achilles_cache")); jdbcTemplate.execute(insertSqlRow1); - String insertSqlRow2 = "INSERT INTO achilles_cache (id, source_id, cache_name, cache) VALUES (2, 1, 'cache2', 'cache2')"; + String insertSqlRow2 = String.format("INSERT INTO %s (id, source_id, cache_name, cache) VALUES (2, 1, 'cache2', 'cache2')", qualifyOhdsiTable("achilles_cache")); jdbcTemplate.execute(insertSqlRow2); // Act achillesService.clearCache(); // Assert - String sql = "SELECT COUNT(*) FROM achilles_cache"; + String sql = String.format("SELECT COUNT(*) FROM %s", qualifyOhdsiTable("achilles_cache")); Integer count = jdbcTemplate.queryForObject(sql, Integer.class); assertEquals(0, count.intValue()); } @@ -131,7 +131,7 @@ public void cdmCacheService_clearCache_nothingInCache_doesNothing() { cdmCacheService.clearCache(); // Assert - String sql = "SELECT COUNT(*) FROM 
cdm_cache"; + String sql = String.format("SELECT COUNT(*) FROM %s", qualifyOhdsiTable("cdm_cache")); Integer count = jdbcTemplate.queryForObject(sql, Integer.class); assertEquals(0, count.intValue()); } @@ -140,16 +140,16 @@ public void cdmCacheService_clearCache_nothingInCache_doesNothing() { public void cdmCacheService_clearCache_somethingInCache_clearsAllRowsForSource() { // Arrange - String insertSqlRow1 = "INSERT INTO cdm_cache (id, concept_id, source_id, record_count, descendant_record_count, person_count, descendant_person_count) VALUES (1, 1, 1, 100, 101, 102, 103)"; + String insertSqlRow1 = String.format("INSERT INTO %s (id, concept_id, source_id, record_count, descendant_record_count, person_count, descendant_person_count) VALUES (1, 1, 1, 100, 101, 102, 103)", qualifyOhdsiTable("cdm_cache")); jdbcTemplate.execute(insertSqlRow1); - String insertSqlRow2 = "INSERT INTO cdm_cache (id, concept_id, source_id, record_count, descendant_record_count, person_count, descendant_person_count) VALUES (2, 2, 1, 200, 201, 202, 203)"; + String insertSqlRow2 = String.format("INSERT INTO %s (id, concept_id, source_id, record_count, descendant_record_count, person_count, descendant_person_count) VALUES (2, 2, 1, 200, 201, 202, 203)", qualifyOhdsiTable("cdm_cache")); jdbcTemplate.execute(insertSqlRow2); // Act cdmCacheService.clearCache(); // Assert - String sql = "SELECT COUNT(*) FROM cdm_cache"; + String sql = String.format("SELECT COUNT(*) FROM %s", qualifyOhdsiTable("cdm_cache")); Integer count = jdbcTemplate.queryForObject(sql, Integer.class); assertEquals(0, count.intValue()); } diff --git a/src/test/java/org/ohdsi/webapi/test/CohortAnalysisServiceIT.java b/src/test/java/org/ohdsi/webapi/test/CohortAnalysisServiceIT.java index 81b6930a42..71b762b73b 100644 --- a/src/test/java/org/ohdsi/webapi/test/CohortAnalysisServiceIT.java +++ b/src/test/java/org/ohdsi/webapi/test/CohortAnalysisServiceIT.java @@ -37,8 +37,8 @@ public class CohortAnalysisServiceIT extends WebApiIT { @Before public void init() throws Exception { - truncateTable(String.format("%s.%s", "public", "source")); - resetSequence(String.format("%s.%s", "public", "source_sequence")); + truncateTable(String.format("%s.%s", getOhdsiSchema(), "source")); + resetSequence(String.format("%s.%s", getOhdsiSchema(), "source_sequence")); sourceRepository.saveAndFlush(getCdmSource()); prepareCdmSchema(); prepareResultSchema(); diff --git a/src/test/java/org/ohdsi/webapi/test/VocabularyServiceIT.java b/src/test/java/org/ohdsi/webapi/test/VocabularyServiceIT.java index f6ae6604cd..21fb1d6206 100644 --- a/src/test/java/org/ohdsi/webapi/test/VocabularyServiceIT.java +++ b/src/test/java/org/ohdsi/webapi/test/VocabularyServiceIT.java @@ -28,8 +28,8 @@ public class VocabularyServiceIT extends WebApiIT { @Before public void init() throws Exception { - truncateTable(String.format("%s.%s", "public", "source")); - resetSequence(String.format("%s.%s", "public", "source_sequence")); + truncateTable(String.format("%s.%s", getOhdsiSchema(), "source")); + resetSequence(String.format("%s.%s", getOhdsiSchema(), "source_sequence")); sourceRepository.saveAndFlush(getCdmSource()); prepareCdmSchema(); prepareResultSchema(); diff --git a/src/test/java/org/ohdsi/webapi/test/WebApiIT.java b/src/test/java/org/ohdsi/webapi/test/WebApiIT.java index 3c7cab183c..e42519d582 100644 --- a/src/test/java/org/ohdsi/webapi/test/WebApiIT.java +++ b/src/test/java/org/ohdsi/webapi/test/WebApiIT.java @@ -5,6 +5,7 @@ import com.github.springtestdbunit.DbUnitTestExecutionListener; import 
com.github.springtestdbunit.annotation.DbUnitConfiguration; +import com.github.springtestdbunit.bean.DatabaseConfigBean; import java.io.IOException; import java.sql.SQLException; import java.util.Arrays; @@ -24,6 +25,7 @@ import org.ohdsi.webapi.WebApi; import org.ohdsi.webapi.source.Source; import org.ohdsi.webapi.source.SourceDaimon; +import org.dbunit.ext.postgresql.PostgresqlDataTypeFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; @@ -52,6 +54,9 @@ public abstract class WebApiIT { protected static final String CDM_SCHEMA_NAME = "cdm"; protected static final String RESULT_SCHEMA_NAME = "results"; + @Value("${datasource.ohdsi.schema:public}") + private String ohdsiSchema; + private static final Collection CDM_DDL_FILE_PATHS = Arrays.asList("/cdm-postgresql-ddl.sql"); private static final Collection RESULTS_DDL_FILE_PATHS = Arrays.asList( "/ddl/results/cohort.sql", @@ -74,13 +79,22 @@ public abstract class WebApiIT { @TestConfiguration public static class DbUnitConfiguration { - @Bean - DatabaseDataSourceConnectionFactoryBean dbUnitDatabaseConnection() { + @Bean + DatabaseDataSourceConnectionFactoryBean dbUnitDatabaseConnection(DatabaseConfigBean dbUnitDatabaseConfig, + @Value("${datasource.ohdsi.schema:public}") String ohdsiSchema) { // Use the embedded PostgreSQL datasource from ITStarter DatabaseDataSourceConnectionFactoryBean dbUnitDatabaseConnection = new DatabaseDataSourceConnectionFactoryBean(ITStarter.getDataSource()); - dbUnitDatabaseConnection.setSchema("public"); + dbUnitDatabaseConnection.setSchema(ohdsiSchema); + dbUnitDatabaseConnection.setDatabaseConfig(dbUnitDatabaseConfig); return dbUnitDatabaseConnection; } + + @Bean + DatabaseConfigBean dbUnitDatabaseConfig() { + DatabaseConfigBean config = new DatabaseConfigBean(); + config.setDatatypeFactory(new PostgresqlDataTypeFactory()); + return config; + } } @Value("${baseUri}") @@ -125,11 +139,22 @@ public void assertOK(ResponseEntity entity) { } } + protected String getOhdsiSchema() { + return ohdsiSchema; + } + + protected String qualifyOhdsiTable(String tableName) { + return String.format("%s.%s", ohdsiSchema, tableName); + } + protected void truncateTable(final String tableName) { - jdbcTemplate.execute(String.format("TRUNCATE %s CASCADE",tableName)); + String qualifiedName = tableName.contains(".") ? tableName : String.format("%s.%s", ohdsiSchema, tableName); + jdbcTemplate.execute(String.format("TRUNCATE %s CASCADE", qualifiedName)); } + protected void resetSequence(final String sequenceName) { - jdbcTemplate.execute(String.format("ALTER SEQUENCE %s RESTART WITH 1", sequenceName)); + String qualifiedName = sequenceName.contains(".") ? 
sequenceName : String.format("%s.%s", ohdsiSchema, sequenceName); + jdbcTemplate.execute(String.format("ALTER SEQUENCE %s RESTART WITH 1", qualifiedName)); } protected Source getCdmSource() throws SQLException { diff --git a/src/test/resources/application-test.properties b/src/test/resources/application-test.properties index 036d21c02e..77c4a365a4 100644 --- a/src/test/resources/application-test.properties +++ b/src/test/resources/application-test.properties @@ -33,21 +33,23 @@ datasource.url=jdbc:postgresql://localhost:5432/postgres datasource.username=postgres datasource.password= datasource.dialect=postgresql -datasource.ohdsi.schema=public +datasource.ohdsi.schema=${WEBAPI_SCHEMA:public} -spring.flyway.driver-class-name=org.postgresql.Driver +spring.flyway.driver-class-name=${datasource.driverClassName} # System property set by AbstractDatabaseTest with embedded Postgres URL -spring.flyway.url=jdbc:postgresql://localhost:5432/postgres -spring.flyway.user=postgres -spring.flyway.password= -spring.flyway.schemas=public -spring.flyway.placeholders.ohdsiSchema=public +spring.flyway.url=${datasource.url} +spring.flyway.user=${datasource.username} +spring.flyway.password=${datasource.password} +spring.flyway.schemas=${datasource.ohdsi.schema} +spring.flyway.placeholders.ohdsiSchema=${datasource.ohdsi.schema} spring.flyway.locations=classpath:db/migration/postgresql # CRITICAL: Maintain backward compatibility with Flyway 4.x schema history table name spring.flyway.table=schema_version -spring.jpa.properties.hibernate.default_schema=public -spring.batch.repository.tableprefix=public.BATCH_ +spring.jpa.properties.hibernate.default_schema=${datasource.ohdsi.schema} +spring.batch.repository.tableprefix=${datasource.ohdsi.schema}.BATCH_ +spring.batch.jdbc.table-prefix=${datasource.ohdsi.schema}.BATCH_ hibernate.dialect=org.hibernate.dialect.PostgreSQLDialect +jersey.config.server.wadl.disableWadl=true security.provider=DisabledSecurity From ab66dc8464317160006d0998f4f0571c5763171f Mon Sep 17 00:00:00 2001 From: Peter Hoffmann <954078+p-hoffmann@users.noreply.github.com> Date: Sun, 4 Jan 2026 23:14:19 +0800 Subject: [PATCH 02/11] add ci to publish docker image --- .github/workflows/ci.yaml | 45 ++++++++++--------- Dockerfile | 8 ++-- pom.xml | 9 +++- .../org/ohdsi/webapi/service/UserService.java | 7 ++- .../ohdsi/webapi/trexsql/TrexSQLConfig.java | 2 - .../trexsql/TrexSQLInstanceManager.java | 6 +++ .../webapi/trexsql/TrexSQLServletConfig.java | 19 +++++++- 7 files changed, 64 insertions(+), 32 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index bc04e30fda..5e5afe4b2e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -9,7 +9,7 @@ on: branches: [ master, webapi-3.0 ] env: - DOCKER_IMAGE: ohdsi/webapi + DOCKER_IMAGE: ghcr.io/${{ github.repository }} jobs: # Build and test the code @@ -23,7 +23,7 @@ jobs: # Steps represent a sequence of tasks that will be executed as part of the job steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: actions/setup-java@v4 with: @@ -47,7 +47,7 @@ jobs: run: mvn -B -P${{ env.MAVEN_PROFILE }} test # Check that the docker image builds correctly - # Push to ohdsi/atlas:master for commits on master. + # Push to ghcr.io for commits on master or webapi-3.0. 
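  # Pull request builds only build and load the image into the local Docker daemon
  # (push=false / load=true in the build parameters below), so no registry
  # credentials are required for forks or PRs.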
docker: # The type of runner that the job will run on runs-on: ubuntu-latest @@ -55,7 +55,7 @@ jobs: # Steps represent a sequence of tasks that will be executed as part of the job steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Cache Docker layers uses: actions/cache@v4 @@ -68,40 +68,44 @@ jobs: # Add Docker labels and tags - name: Docker meta id: docker_meta - uses: crazy-max/ghaction-docker-meta@v1 + uses: docker/metadata-action@v5 with: images: ${{ env.DOCKER_IMAGE }} + tags: | + type=raw,value=dev,enable=${{ github.ref == 'refs/heads/master' }} + type=raw,value=3.0-dev,enable=${{ github.ref == 'refs/heads/webapi-3.0' }} # Setup docker build environment - name: Set up QEMU - uses: docker/setup-qemu-action@v1 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 + uses: docker/setup-buildx-action@v3 - name: Set build parameters id: build_params run: | - echo "::set-output name=sha8::${GITHUB_SHA::8}" - if [ "${{ github.event_name }}" != "pull_request" ] && [ "${{ github.ref }}" == "refs/heads/master" ]; then - echo "::set-output name=push::true" - echo "::set-output name=load::false" - echo "::set-output name=platforms::linux/amd64,linux/arm64" + echo "sha8=${GITHUB_SHA::8}" >> $GITHUB_OUTPUT + if [ "${{ github.event_name }}" != "pull_request" ] && ( [ "${{ github.ref }}" == "refs/heads/master" ] || [ "${{ github.ref }}" == "refs/heads/webapi-3.0" ] ); then + echo "push=true" >> $GITHUB_OUTPUT + echo "load=false" >> $GITHUB_OUTPUT + echo "platforms=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT else - echo "::set-output name=push::false" - echo "::set-output name=load::true" - echo "::set-output name=platforms::linux/amd64" + echo "push=false" >> $GITHUB_OUTPUT + echo "load=true" >> $GITHUB_OUTPUT + echo "platforms=linux/amd64" >> $GITHUB_OUTPUT fi - - name: Login to DockerHub - uses: docker/login-action@v1 + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 if: steps.build_params.outputs.push == 'true' with: - username: ${{ secrets.DOCKER_HUB_USERNAME }} - password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} - name: Build and push id: docker_build - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v5 with: context: ./ file: ./Dockerfile @@ -113,6 +117,7 @@ jobs: build-args: | GIT_BRANCH=${{ steps.docker_meta.outputs.version }} GIT_COMMIT_ID_ABBREV=${{ steps.build_params.outputs.sha8 }} + MAVEN_PROFILE=webapi-docker,tcache tags: ${{ steps.docker_meta.outputs.tags }} # Use runtime labels from docker_meta as well as fixed labels labels: | diff --git a/Dockerfile b/Dockerfile index a899f7d542..c73bd311c2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM maven:3.9-eclipse-temurin-21 as builder +FROM maven:3.9-eclipse-temurin-21 AS builder WORKDIR /code @@ -29,7 +29,7 @@ RUN mvn package ${MAVEN_PARAMS} \ # OHDSI WebAPI running as a Spring Boot executable JAR with Java 21 FROM index.docker.io/library/eclipse-temurin:21-jre -MAINTAINER Lee Evans - www.ltscomputingllc.com +LABEL maintainer="Lee Evans - www.ltscomputingllc.com" # Any Java options to pass along, e.g. memory, garbage collection, etc. 
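# For example (illustrative values only): docker run -e JAVA_OPTS="-Xmx4g -XX:+UseG1GC" ...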
ENV JAVA_OPTS="" @@ -49,6 +49,4 @@ EXPOSE 8080 USER 101 # Run the executable JAR -CMD exec java ${DEFAULT_JAVA_OPTS} ${JAVA_OPTS} \ - --add-opens java.naming/com.sun.jndi.ldap=ALL-UNNAMED \ - -jar WebAPI.jar +CMD ["sh", "-c", "exec java ${DEFAULT_JAVA_OPTS} ${JAVA_OPTS} --add-opens java.naming/com.sun.jndi.ldap=ALL-UNNAMED -jar WebAPI.jar"] diff --git a/pom.xml b/pom.xml index b78eceb49d..0b0ae23d1e 100644 --- a/pom.xml +++ b/pom.xml @@ -558,6 +558,11 @@ + + central + Maven Central + https://repo.maven.apache.org/maven2 + ohdsi repo.ohdsi.org @@ -1092,7 +1097,7 @@ org.springframework.ldap spring-ldap-core - 2.3.2.RELEASE + 3.2.8 org.ohdsi @@ -1225,7 +1230,7 @@ com.github.p-hoffmann trexsql-ext - v0.1.2 + v0.1.6 diff --git a/src/main/java/org/ohdsi/webapi/service/UserService.java b/src/main/java/org/ohdsi/webapi/service/UserService.java index 8076e8e96b..f1bae66068 100644 --- a/src/main/java/org/ohdsi/webapi/service/UserService.java +++ b/src/main/java/org/ohdsi/webapi/service/UserService.java @@ -17,6 +17,7 @@ import java.util.*; import java.util.stream.Collectors; import java.util.stream.StreamSupport; +import org.ohdsi.webapi.trexsql.TrexSQLConfig; /** * @@ -33,6 +34,9 @@ public class UserService { @Autowired private ApplicationEventPublisher eventPublisher; + @Autowired(required = false) + private TrexSQLConfig trexSQLConfig; + @Value("${security.ad.default.import.group}#{T(java.util.Collections).emptyList()}") private List defaultRoles; @@ -51,6 +55,7 @@ public static class User implements Comparable { public String name; public List permissions; public Map> permissionIdx; + public Boolean trexsqlCacheEnabled; public User() {} @@ -116,7 +121,7 @@ public User getCurrentUser() throws Exception { user.name = currentUser.getName(); user.permissions = convertPermissions(permissions); user.permissionIdx = authorizer.queryUserPermissions(currentUser.getLogin()).permissions; - + user.trexsqlCacheEnabled = trexSQLConfig != null && trexSQLConfig.isEnabled(); return user; } diff --git a/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLConfig.java b/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLConfig.java index 40d3dd7dfb..2fb2b09511 100644 --- a/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLConfig.java +++ b/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLConfig.java @@ -1,7 +1,6 @@ package org.ohdsi.webapi.trexsql; import org.springframework.boot.context.properties.ConfigurationProperties; -import org.springframework.context.annotation.Configuration; import java.util.HashMap; import java.util.Map; @@ -10,7 +9,6 @@ * Configuration properties for trexsql integration. * Maps to trexsql.* in application properties. 
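 * The integration is only wired up when trexsql.enabled=true; see the
 * @ConditionalOnProperty guard on TrexSQLServletConfig, which also enables
 * binding of these properties.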
*/ -@Configuration @ConfigurationProperties(prefix = "trexsql") public class TrexSQLConfig { diff --git a/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLInstanceManager.java b/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLInstanceManager.java index 36a6997018..5c7ba9c936 100644 --- a/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLInstanceManager.java +++ b/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLInstanceManager.java @@ -80,6 +80,12 @@ private Map buildConfig() { initConfig.put("extensions-path", config.getExtensionsPath()); } + if (config.getCachePath() != null && !config.getCachePath().isEmpty()) { + initConfig.put("cache-path", config.getCachePath()); + } + + initConfig.put("allow-unsigned-extensions", true); + return initConfig; } diff --git a/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLServletConfig.java b/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLServletConfig.java index 1532ba874c..5fb95bebec 100644 --- a/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLServletConfig.java +++ b/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLServletConfig.java @@ -3,22 +3,37 @@ import org.trex.TrexServlet; import jakarta.servlet.http.HttpServlet; import org.ohdsi.webapi.source.SourceRepository; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.web.servlet.ServletRegistrationBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import java.util.HashMap; +import java.util.Map; + @Configuration @ConditionalOnProperty(name = "trexsql.enabled", havingValue = "true") +@EnableConfigurationProperties(TrexSQLConfig.class) public class TrexSQLServletConfig { + private static final Logger log = LoggerFactory.getLogger(TrexSQLServletConfig.class); + @Bean - public ServletRegistrationBean trexServlet( + public ServletRegistrationBean trexServlet( TrexSQLInstanceManager instanceManager, + TrexSQLConfig trexConfig, SourceRepository sourceRepository) { TrexServlet servlet = new TrexServlet(); - servlet.initTrex(instanceManager.getInstance(), sourceRepository); + Map config = new HashMap<>(); + String cachePath = trexConfig.getCachePath(); + log.info("TrexSQL cache path configured as: {}", cachePath); + config.put("cache-path", cachePath); + + servlet.initTrex(instanceManager.getInstance(), sourceRepository, config); ServletRegistrationBean registration = new ServletRegistrationBean<>(servlet, "/WebAPI/trexsql/*"); From e0db245007655d37d5f04578769c73f30eace912 Mon Sep 17 00:00:00 2001 From: Peter Hoffmann <954078+p-hoffmann@users.noreply.github.com> Date: Mon, 5 Jan 2026 00:11:15 +0800 Subject: [PATCH 03/11] fix --- pom.xml | 25 ++++++- .../java/org/ohdsi/webapi/test/WebApiIT.java | 70 +++++++++++++++++++ 2 files changed, 93 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 0b0ae23d1e..65e619c960 100644 --- a/pom.xml +++ b/pom.xml @@ -344,6 +344,16 @@ **/*.properties + + application-test.properties + + + + src/test/resources + false + + application-test.properties + src/test/resources @@ -400,8 +410,19 @@ diff --git a/src/test/java/org/ohdsi/webapi/test/WebApiIT.java b/src/test/java/org/ohdsi/webapi/test/WebApiIT.java index e42519d582..ef73ac6280 100644 --- a/src/test/java/org/ohdsi/webapi/test/WebApiIT.java +++ b/src/test/java/org/ohdsi/webapi/test/WebApiIT.java @@ -10,12 +10,17 @@ import java.sql.SQLException; import 
java.util.Arrays; import java.util.Collection; +import java.util.Collections; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; import com.github.springtestdbunit.bean.DatabaseDataSourceConnectionFactoryBean; +import org.flywaydb.core.Flyway; import org.ohdsi.webapi.common.DBMSType; import org.ohdsi.webapi.arachne.datasource.dto.KerberosAuthMechanism; import org.apache.catalina.webresources.TomcatURLStreamHandlerFactory; import org.junit.AfterClass; +import org.junit.Before; import org.junit.BeforeClass; import org.junit.runner.RunWith; import org.ohdsi.circe.helper.ResourceHelper; @@ -57,6 +62,21 @@ public abstract class WebApiIT { @Value("${datasource.ohdsi.schema:public}") private String ohdsiSchema; + @Value("${spring.flyway.locations:classpath:db/migration/postgresql}") + private String flywayLocations; + + @Value("${spring.flyway.table:schema_version}") + private String flywayTable; + + @Value("${spring.flyway.baseline-on-migrate:true}") + private boolean flywayBaselineOnMigrate; + + @Value("${spring.flyway.out-of-order:true}") + private boolean flywayOutOfOrder; + + private static final AtomicBoolean OHDSI_SCHEMA_INITIALIZED = new AtomicBoolean(false); + private static final Object OHDSI_SCHEMA_LOCK = new Object(); + private static final Collection CDM_DDL_FILE_PATHS = Arrays.asList("/cdm-postgresql-ddl.sql"); private static final Collection RESULTS_DDL_FILE_PATHS = Arrays.asList( "/ddl/results/cohort.sql", @@ -111,6 +131,20 @@ public static void before() throws IOException { jdbcTemplate = new JdbcTemplate(ITStarter.getDataSource()); } + @Before + public void ensureOhdsiSchemaInitialized() { + if (OHDSI_SCHEMA_INITIALIZED.get()) { + return; + } + synchronized (OHDSI_SCHEMA_LOCK) { + if (OHDSI_SCHEMA_INITIALIZED.get()) { + return; + } + initializeOhdsiSchemaIfNeeded(); + OHDSI_SCHEMA_INITIALIZED.set(true); + } + } + @AfterClass public static void after() { ITStarter.tearDownSubject(); @@ -208,4 +242,40 @@ private void prepareSchema(final String schemaName, final String schemaToken, fi String ddlSql = SqlTranslate.translateSql(resultSql, DBMSType.POSTGRESQL.getOhdsiDB()); jdbcTemplate.batchUpdate(SqlSplit.splitSql(ddlSql)); } + + private void initializeOhdsiSchemaIfNeeded() { + if (tableExists(ohdsiSchema, "source")) { + return; + } + runFlywayMigrationsWithPrefix("B"); + runFlywayMigrationsWithPrefix("V"); + } + + private void runFlywayMigrationsWithPrefix(String migrationPrefix) { + Map placeholders = Collections.singletonMap("ohdsiSchema", ohdsiSchema); + Flyway.configure() + .dataSource(ITStarter.getDataSource()) + .locations(resolveFlywayLocations()) + .schemas(ohdsiSchema) + .table(flywayTable) + .baselineOnMigrate(flywayBaselineOnMigrate) + .outOfOrder(flywayOutOfOrder) + .placeholders(placeholders) + .sqlMigrationPrefix(migrationPrefix) + .load() + .migrate(); + } + + private String[] resolveFlywayLocations() { + return Arrays.stream(flywayLocations.split(",")) + .map(String::trim) + .filter(location -> !location.isEmpty()) + .toArray(String[]::new); + } + + private boolean tableExists(String schema, String tableName) { + String sql = "SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_schema = ? 
AND table_name = ?)"; + Boolean exists = jdbcTemplate.queryForObject(sql, Boolean.class, schema, tableName); + return Boolean.TRUE.equals(exists); + } } From f3fd369c1c3fa68290557b52e141b375ec81438a Mon Sep 17 00:00:00 2001 From: Peter Hoffmann <954078+p-hoffmann@users.noreply.github.com> Date: Mon, 5 Jan 2026 00:18:34 +0800 Subject: [PATCH 04/11] fix --- pom.xml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pom.xml b/pom.xml index 65e619c960..9826156bbd 100644 --- a/pom.xml +++ b/pom.xml @@ -454,8 +454,6 @@ --add-opens java.naming/com.sun.jndi.ldap=ALL-UNNAMED - ${git.branch} - ${git.commit.id.abbrev} ${buildinfo.atlas.milestone.id} ${buildinfo.webapi.milestone.id} ${buildinfo.atlas.release.tag} From 986a9213fc2848dc2fda8f41d8268fadacfd9f20 Mon Sep 17 00:00:00 2001 From: Peter Hoffmann <954078+p-hoffmann@users.noreply.github.com> Date: Mon, 5 Jan 2026 00:25:20 +0800 Subject: [PATCH 05/11] fix --- .github/workflows/ci.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 5e5afe4b2e..ff9cabde21 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -8,9 +8,6 @@ on: pull_request: branches: [ master, webapi-3.0 ] -env: - DOCKER_IMAGE: ghcr.io/${{ github.repository }} - jobs: # Build and test the code build: @@ -57,6 +54,9 @@ jobs: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - uses: actions/checkout@v4 + - name: Set Docker image name + run: echo "DOCKER_IMAGE=ghcr.io/${GITHUB_REPOSITORY,,}" >> $GITHUB_ENV + - name: Cache Docker layers uses: actions/cache@v4 with: From 8ac9b7f9450e83867b7134ed0f9a94a8f997063d Mon Sep 17 00:00:00 2001 From: Peter Hoffmann <954078+p-hoffmann@users.noreply.github.com> Date: Mon, 5 Jan 2026 08:42:21 +0800 Subject: [PATCH 06/11] fix --- .github/workflows/ci.yaml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ff9cabde21..c6b6b1dbfe 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -55,7 +55,13 @@ jobs: - uses: actions/checkout@v4 - name: Set Docker image name - run: echo "DOCKER_IMAGE=ghcr.io/${GITHUB_REPOSITORY,,}" >> $GITHUB_ENV + run: | + REPO="${GITHUB_REPOSITORY:-ohdsi/webapi}" + DOCKER_IMAGE="ghcr.io/$(echo "${REPO}" | tr '[:upper:]' '[:lower:]')" + echo "DOCKER_IMAGE=${DOCKER_IMAGE}" >> $GITHUB_ENV + + - name: Debug Docker image name + run: echo "DOCKER_IMAGE=${DOCKER_IMAGE}" - name: Cache Docker layers uses: actions/cache@v4 From d49e3b455d75f70df709aba9bd080e837dc8023d Mon Sep 17 00:00:00 2001 From: Peter Hoffmann <954078+p-hoffmann@users.noreply.github.com> Date: Mon, 5 Jan 2026 08:55:33 +0800 Subject: [PATCH 07/11] fix --- .github/workflows/ci.yaml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c6b6b1dbfe..1718e83bcc 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -80,6 +80,15 @@ jobs: tags: | type=raw,value=dev,enable=${{ github.ref == 'refs/heads/master' }} type=raw,value=3.0-dev,enable=${{ github.ref == 'refs/heads/webapi-3.0' }} + type=sha,prefix={{branch}}- + + - name: Debug Docker metadata + run: | + echo "Docker metadata outputs:" + echo "version: ${{ steps.docker_meta.outputs.version }}" + echo "tags: ${{ steps.docker_meta.outputs.tags }}" + echo "labels: ${{ steps.docker_meta.outputs.labels }}" + echo "json: ${{ steps.docker_meta.outputs.json }}" # Setup docker build environment - name: Set up QEMU From 
6d99b71e87166bfa0def0abf169516e39f1095a4 Mon Sep 17 00:00:00 2001 From: Peter Hoffmann <954078+p-hoffmann@users.noreply.github.com> Date: Mon, 5 Jan 2026 09:01:29 +0800 Subject: [PATCH 08/11] fix --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 1718e83bcc..1e8d79d5e9 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -80,7 +80,8 @@ jobs: tags: | type=raw,value=dev,enable=${{ github.ref == 'refs/heads/master' }} type=raw,value=3.0-dev,enable=${{ github.ref == 'refs/heads/webapi-3.0' }} - type=sha,prefix={{branch}}- + type=sha,prefix=pr-,enable=${{ github.event_name == 'pull_request' }} + type=ref,event=branch,prefix=branch-,enable=${{ github.ref != 'refs/heads/master' && github.ref != 'refs/heads/webapi-3.0' }} - name: Debug Docker metadata run: | From da4af4e2d3edc26fd169031ba2fbe0f99ebb7831 Mon Sep 17 00:00:00 2001 From: Peter Hoffmann <954078+p-hoffmann@users.noreply.github.com> Date: Mon, 5 Jan 2026 21:15:50 +0800 Subject: [PATCH 09/11] update trexsql --- Dockerfile | 11 +++++++++-- pom.xml | 4 ++-- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index c73bd311c2..c27b0c9a75 100644 --- a/Dockerfile +++ b/Dockerfile @@ -44,9 +44,16 @@ WORKDIR /var/lib/ohdsi/webapi COPY --from=builder /code/opentelemetry-javaagent.jar . COPY --from=builder /code/target/WebAPI.jar . +# Extract TrexSQL native library from the nested JAR for proper loading +RUN mkdir -p /tmp/trexsql && \ + unzip -j WebAPI.jar 'BOOT-INF/lib/trexsql-ext-*.jar' -d /tmp && \ + unzip -j /tmp/trexsql-ext-*.jar 'libtrexsql_java.so_linux_amd64' -d /tmp/trexsql 2>/dev/null || true && \ + mv /tmp/trexsql/libtrexsql_java.so_linux_amd64 /tmp/trexsql/libtrexsql_java.so 2>/dev/null || true && \ + rm -f /tmp/trexsql-ext-*.jar + EXPOSE 8080 USER 101 -# Run the executable JAR -CMD ["sh", "-c", "exec java ${DEFAULT_JAVA_OPTS} ${JAVA_OPTS} --add-opens java.naming/com.sun.jndi.ldap=ALL-UNNAMED -jar WebAPI.jar"] +# Run the executable JAR with TrexSQL native library path +CMD ["sh", "-c", "exec java ${DEFAULT_JAVA_OPTS} ${JAVA_OPTS} -Dorg.duckdb.lib_path=/tmp/trexsql/libtrexsql_java.so --add-opens java.naming/com.sun.jndi.ldap=ALL-UNNAMED -jar WebAPI.jar"] diff --git a/pom.xml b/pom.xml index 9826156bbd..ae9698a2b5 100644 --- a/pom.xml +++ b/pom.xml @@ -1249,7 +1249,7 @@ com.github.p-hoffmann trexsql-ext - v0.1.6 + v0.1.11 @@ -1926,7 +1926,7 @@ war - war + jar From 0a9edb5b65f46a828a20802112c5b11c6f5e7430 Mon Sep 17 00:00:00 2001 From: Peter Hoffmann <954078+p-hoffmann@users.noreply.github.com> Date: Tue, 6 Jan 2026 11:52:33 +0800 Subject: [PATCH 10/11] readd migrations for broadsea --- Dockerfile | 5 +- pom.xml | 24 +- ...0180807192421__cohortDetailsHashcodes.java | 29 ++ ...20190410103000__migratePathwayResults.java | 181 ++++++++++++ ...90520171430__cohortExpressionHashCode.java | 34 +++ ..._0_20191106092815__migrateEventFAType.java | 70 +++++ .../trexsql/TrexSQLInstanceManager.java | 17 +- .../webapi/trexsql/TrexSQLSearchProvider.java | 7 +- .../ohdsi/webapi/trexsql/TrexSQLService.java | 2 + src/main/resources/application.properties | 7 +- .../ensureTables.sql | 32 ++ .../getPathwayGeneratedCodes.sql | 1 + .../getPathwayGeneratedDesigns.sql | 5 + .../migratePathwayResults.sql | 25 ++ .../saveCodes.sql | 2 + .../updateFaType.sql | 3 + .../updateFaTypeImpala.sql | 8 + ...11012325133__assets_tags_add_endpoints.sql | 26 ++ .../V2.11.0.20211101134413__reusables.sql | 129 ++++++++ 
.../V2.11.0.20211109155216__cdm_cache.sql | 16 + ...0218125000__add_permission_for_checkv2.sql | 9 + ...00__add_tagging_mass_assign_permission.sql | 12 + ...20719110154__add_description_to_assets.sql | 3 + ...02208110000__alter_source_cache_enable.sql | 3 + ....202208240001__concept_recommend_perms.sql | 24 ++ .../V2.12.0.20221102113317__user_originb.sql | 9 + ...alter_source_set_cache_enabled_default.sql | 1 + ...4175000__add_tag_management_permission.sql | 9 + ..._concept_set_csv_comparison_permission.sql | 7 + ...057__add_export_conceptset_permissions.sql | 20 ++ ...__delete-unnecessary-admin-permissions.sql | 1 + ...V2.2.0.20180215143000__remove_password.sql | 2 + ....5.20180212152023__concept-sets-author.sql | 5 + .../V2.2.5.20180212181325__cca-author.sql | 3 + ...2.5.20180215105415__separate-sequences.sql | 19 ++ ....2.6.20180215152023__source_key_unique.sql | 1 + ....20180302143300__negative_control_redo.sql | 37 +++ ....3.0.20180302143400__alter_cs_gen_info.sql | 9 + ...20180330124512__add_source_permissions.sql | 40 +++ ...20180405164306__add_profile_permission.sql | 14 + ....0.20180412000000__increment_sequences.sql | 16 + .../V2.3.0.20180412000001__constraints.sql | 8 + ...85900__cohort_analysis_generation_info.sql | 16 + ...V2.3.0.20180427114800__sec_user_unique.sql | 1 + ...4.0.20180508090000__source-credentials.sql | 2 + .../V2.4.0.20180516223100__roles-unique.sql | 1 + ...20180619113700__permission_for_my_user.sql | 14 + ...702202700__permission_for_ir_execution.sql | 9 + ...0180703144901__permission_for_evidence.sql | 9 + ...2403__schema-add-analysis-gen-progress.sql | 1 + .../V2.5.0.20180713123456__cem_v_1.sql | 12 + ...20117120__source-connection-check-rule.sql | 5 + ...80725172844__add-ad-import-permissions.sql | 64 ++++ ...0192730__schema-add-kerberos-to-source.sql | 4 + ...17154116__add-extra-import-permissions.sql | 8 + ...0180731092421__cohort-characterization.sql | 276 ++++++++++++++++++ ...hort-characterization-generations-view.sql | 27 ++ ...2.6.0.20180906220021__pathway_analysis.sql | 98 +++++++ ...5__migrate_common_entities_to_user_rel.sql | 77 +++++ .../V2.6.0.20180921202400__fe-analysis-id.sql | 1 + ....20181001200021__estimation_prediction.sql | 50 ++++ ...0181002110845__fe_analysis_conceptsets.sql | 1 + ...005122300__schema-create-fe-conceptset.sql | 16 + ...0181008210200__source-deleted-at-field.sql | 1 + ....20181009110500__fix-fe-analysis-types.sql | 45 +++ ...0181009115500__fix-ple-plp-permissions.sql | 47 +++ ...1010133216__schema-add-job-is-canceled.sql | 7 + ...10185036__schema-user-import-scheduler.sql | 44 +++ ...__schema-user-import-scheduler-history.sql | 23 ++ ...6.0.20181015182101__role-group-mapping.sql | 10 + ...00__add_last_viewed_notifications_time.sql | 18 ++ ...150353__separate_system_personal_roles.sql | 8 + ...2.6.0.20181107165252__fe_criteria_type.sql | 7 + .../V2.6.0.20181128150100__add_missing_pk.sql | 45 +++ .../V2.7.0.20181119162154__cc_strata.sql | 29 ++ ....7.0.20190116183005__default_stat_type.sql | 6 + ...25113000__fe-analysis-created-modified.sql | 5 + ...00__fe-analysis-criteria_stat-type-fix.sql | 19 ++ ...0190128134827__create_absent_sequences.sql | 13 + ...3000__fe-analysis-created-modified-fix.sql | 10 + ...0.20190201090000__bjep_idx_and_cleanup.sql | 1 + ..._cc-and-pathway-cancel-job-permissions.sql | 31 ++ ...190204183006__ir-cancel-job-permission.sql | 10 + ...205174343__cc-pathway-copy-permissions.sql | 23 ++ ...8164736__analysis_execution-add-job_id.sql | 11 + ...tion-estimation-generation-permissions.sql | 23 ++ 
....7.0.20190211182000__permissions-fixes.sql | 73 +++++ ...190212154939__analysis_execution_files.sql | 13 + .../V2.7.0.201902130900__source-sequences.sql | 11 + ...0190213161124__add-fk-to-source-daimon.sql | 2 + ....0.20190214110000__permissions-fixes-2.sql | 14 + ....0.20190214145000__permissions-fixes-3.sql | 41 +++ ....0.20190215113000__permissions-fixes-4.sql | 38 +++ ...20113500__permissions-fixes-ir-profile.sql | 48 +++ ..._permissions-fixes-source-codes-import.sql | 27 ++ ...22154724__permission-fixes-conceptsets.sql | 9 + .../V2.7.0.20190225165203__plp_gen_view.sql | 22 ++ ....0.20190225165752__estimation_gen_view.sql | 22 ++ ...ssions-fixes-cohort-export-conceptsets.sql | 9 + ...28160000__permissions-fixes-cc-explore.sql | 9 + ....7.0.20190301130000__cc-unique-stratas.sql | 1 + .../V2.7.0.20190304084500__plp-ple-import.sql | 11 + ...0304131519__standardize-permissions-cc.sql | 37 +++ ...0255__standardize-permissions-pathways.sql | 37 +++ ...0304162609__standardize-permissions-ir.sql | 54 ++++ ...13000__standardize-permissions-cohorts.sql | 10 + ...0__standardize-permissions-conceptsets.sql | 3 + .../V2.7.0.20190304220500__role-moderator.sql | 36 +++ ...05123620__ir-executioninfo-permissions.sql | 27 ++ ...306094500__sources-endpoint-permission.sql | 5 + ....0.20190306154500__rename_heracles_seq.sql | 1 + ...190311152238__permissions-fixes-ir-sql.sql | 9 + ...11182048__fix_vocab_search_permissions.sql | 33 +++ ...312164953__fix_permission_id_seq_value.sql | 2 + ...0190313161353__fix_permission_heracles.sql | 19 ++ ...3__alter-ir-execution-status-as-string.sql | 12 + ...203__added-ir-report-perms-for-sources.sql | 21 ++ ...7.1.20190405124500__split_output_files.sql | 16 + ....7.2.20190429174506__run-as_permission.sql | 7 + ....2.20190528153600__fix-ir-report-perms.sql | 27 ++ ...20190905163100__cache-clear-permission.sql | 7 + ...00728164800__add_conceptset_permission.sql | 11 + ...14500__delete_design_column_from_views.sql | 91 ++++++ .../V2.8.0.20190326152000__fix-role-perms.sql | 3 + ...0326180601__add-cc-download-permission.sql | 9 + ...190405140828__cc_generation_export_all.sql | 17 ++ ...14180601__add-entity-exists-permission.sql | 39 +++ ...100__pathway-analysis-minSegmentLength.sql | 1 + ...ntity-exists-permission-cohort-concept.sql | 12 + ...add-unique-name-constraint-to-entities.sql | 82 ++++++ ...527190601__add_cs_name_copy_permission.sql | 10 + ..._alter_job-execution-params_string-val.sql | 119 ++++++++ ...604111801__ir_import_export_permission.sql | 12 + ...2.8.0.20190728224300__ds-common-entity.sql | 5 + ...V2.8.0.20190809215200__daimon-priority.sql | 9 + ...2.8.0.20190816173000__generation-cache.sql | 18 ++ ...191203200000__generation-cache-updates.sql | 5 + ...9183702__migrate_feature_extraction_id.sql | 2 + ...0.20200109100200__cohort_sample_tables.sql | 23 ++ ...8.0.20200109132902__fe_domain_null_fix.sql | 14 + ...3000__insert_cohort_sample_permissions.sql | 19 ++ ...127101702__restore_rest_endpoint_perms.sql | 17 ++ ....20200130124345__fe_analysis_aggregate.sql | 96 ++++++ ....20200325145111__check_required_params.sql | 24 ++ ...33802__add-ir-permission-to-atlas-user.sql | 4 + ...V2.8.0.20200413150815__gis_service_api.sql | 18 ++ ....8.0.20200427161830__modify_user_login.sql | 5 + ...d_created_by_to_cohort_generation_info.sql | 2 + ...0.20200616112935__fe_check_permissions.sql | 9 + ...20200724121114__daimon-priority-public.sql | 6 + ...move-ir-put-permission-from-atlas-user.sql | 4 + ...03120903__drop-cohort-features-columns.sql | 37 +++ 
....202010130001__print_friendly_security.sql | 13 + ...0031__concept_ancestor_and_descendants.sql | 24 ++ ...0__add_source_daimon_unique_constraint.sql | 9 + ...is_service_add_check_source_permission.sql | 8 + ...0210219100459__evidence_get_permission.sql | 14 + ...0210226100460__evidence_get_permission.sql | 11 + .../V2.9.0.20210423125133__assets_tags.sql | 189 ++++++++++++ .../V2.9.0.20210513111520__versioning.sql | 176 +++++++++++ .../V2.9.0.20210727101117__achilles_cache.sql | 13 + ...0.20210812164224__assets_tags_renaming.sql | 9 + 162 files changed, 3887 insertions(+), 18 deletions(-) create mode 100644 src/main/java/org/ohdsi/webapi/db/migartion/V2_6_0_20180807192421__cohortDetailsHashcodes.java create mode 100644 src/main/java/org/ohdsi/webapi/db/migartion/V2_8_0_20190410103000__migratePathwayResults.java create mode 100644 src/main/java/org/ohdsi/webapi/db/migartion/V2_8_0_20190520171430__cohortExpressionHashCode.java create mode 100644 src/main/java/org/ohdsi/webapi/db/migartion/V2_8_0_20191106092815__migrateEventFAType.java create mode 100644 src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/ensureTables.sql create mode 100644 src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/getPathwayGeneratedCodes.sql create mode 100644 src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/getPathwayGeneratedDesigns.sql create mode 100644 src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/migratePathwayResults.sql create mode 100644 src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/saveCodes.sql create mode 100644 src/main/resources/db/migration/java/V2_8_0_20191106092815__migrateEventFAType/updateFaType.sql create mode 100644 src/main/resources/db/migration/java/V2_8_0_20191106092815__migrateEventFAType/updateFaTypeImpala.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.11.0.20211012325133__assets_tags_add_endpoints.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.11.0.20211101134413__reusables.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.11.0.20211109155216__cdm_cache.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.11.0.20220218125000__add_permission_for_checkv2.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.12.0.20220710161100__add_tagging_mass_assign_permission.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.12.0.20220719110154__add_description_to_assets.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.12.0.202208110000__alter_source_cache_enable.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.12.0.202208240001__concept_recommend_perms.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.12.0.20221102113317__user_originb.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.12.1.202210120000__alter_source_set_cache_enabled_default.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.13.0.20221024175000__add_tag_management_permission.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.13.0.20221027170000__add_concept_set_csv_comparison_permission.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.13.1.20230524160057__add_export_conceptset_permissions.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.2.0.20180202143000__delete-unnecessary-admin-permissions.sql create mode 
100644 src/main/resources/db/migration/postgresql/V2.2.0.20180215143000__remove_password.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.2.5.20180212152023__concept-sets-author.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.2.5.20180212181325__cca-author.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.2.5.20180215105415__separate-sequences.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.2.6.20180215152023__source_key_unique.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.3.0.20180302143300__negative_control_redo.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.3.0.20180302143400__alter_cs_gen_info.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.3.0.20180330124512__add_source_permissions.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.3.0.20180405164306__add_profile_permission.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.3.0.20180412000000__increment_sequences.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.3.0.20180412000001__constraints.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.3.0.20180425185900__cohort_analysis_generation_info.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.3.0.20180427114800__sec_user_unique.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.4.0.20180508090000__source-credentials.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.4.0.20180516223100__roles-unique.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.4.3.20180619113700__permission_for_my_user.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.4.3.20180702202700__permission_for_ir_execution.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.4.3.20180703144901__permission_for_evidence.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.5.0.20180608182403__schema-add-analysis-gen-progress.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.5.0.20180713123456__cem_v_1.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.5.0.20180720117120__source-connection-check-rule.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.5.0.20180725172844__add-ad-import-permissions.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.5.0.20180730192730__schema-add-kerberos-to-source.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.5.0.20180817154116__add-extra-import-permissions.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20180731092421__cohort-characterization.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20180731092422__cohort-characterization-generations-view.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20180906220021__pathway_analysis.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20180910113305__migrate_common_entities_to_user_rel.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20180921202400__fe-analysis-id.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20181001200021__estimation_prediction.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20181002110845__fe_analysis_conceptsets.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20181005122300__schema-create-fe-conceptset.sql 
create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20181008210200__source-deleted-at-field.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20181009110500__fix-fe-analysis-types.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20181009115500__fix-ple-plp-permissions.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20181010133216__schema-add-job-is-canceled.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20181010185036__schema-user-import-scheduler.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20181010185037__schema-user-import-scheduler-history.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20181015182101__role-group-mapping.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20181016171200__add_last_viewed_notifications_time.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20181024150353__separate_system_personal_roles.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20181107165252__fe_criteria_type.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.6.0.20181128150100__add_missing_pk.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20181119162154__cc_strata.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190116183005__default_stat_type.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190125113000__fe-analysis-created-modified.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190125141500__fe-analysis-criteria_stat-type-fix.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190128134827__create_absent_sequences.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190129083000__fe-analysis-created-modified-fix.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190201090000__bjep_idx_and_cleanup.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190204153542__cc-and-pathway-cancel-job-permissions.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190204183006__ir-cancel-job-permission.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190205174343__cc-pathway-copy-permissions.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190208164736__analysis_execution-add-job_id.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190211181105__prediction-estimation-generation-permissions.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190211182000__permissions-fixes.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190212154939__analysis_execution_files.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.201902130900__source-sequences.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190213161124__add-fk-to-source-daimon.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190214110000__permissions-fixes-2.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190214145000__permissions-fixes-3.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190215113000__permissions-fixes-4.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190220113500__permissions-fixes-ir-profile.sql create 
mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190222113000__permissions-fixes-source-codes-import.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190222154724__permission-fixes-conceptsets.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190225165203__plp_gen_view.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190225165752__estimation_gen_view.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190226160020__permissions-fixes-cohort-export-conceptsets.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190228160000__permissions-fixes-cc-explore.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190301130000__cc-unique-stratas.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190304084500__plp-ple-import.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190304131519__standardize-permissions-cc.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190304160255__standardize-permissions-pathways.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190304162609__standardize-permissions-ir.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190304213000__standardize-permissions-cohorts.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190304220000__standardize-permissions-conceptsets.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190304220500__role-moderator.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190305123620__ir-executioninfo-permissions.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190306094500__sources-endpoint-permission.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190306154500__rename_heracles_seq.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190311152238__permissions-fixes-ir-sql.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190311182048__fix_vocab_search_permissions.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190312164953__fix_permission_id_seq_value.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190313161353__fix_permission_heracles.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190314171003__alter-ir-execution-status-as-string.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.0.20190328172203__added-ir-report-perms-for-sources.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.1.20190405124500__split_output_files.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.2.20190429174506__run-as_permission.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.2.20190528153600__fix-ir-report-perms.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.4.20190905163100__cache-clear-permission.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.7.8.20200728164800__add_conceptset_permission.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20190319114500__delete_design_column_from_views.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20190326152000__fix-role-perms.sql create mode 100644 
src/main/resources/db/migration/postgresql/V2.8.0.20190326180601__add-cc-download-permission.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20190405140828__cc_generation_export_all.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20190414180601__add-entity-exists-permission.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20190417120100__pathway-analysis-minSegmentLength.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20190424140601__add-entity-exists-permission-cohort-concept.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20190424150601__add-unique-name-constraint-to-entities.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20190527190601__add_cs_name_copy_permission.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20190531181956__alter_job-execution-params_string-val.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20190604111801__ir_import_export_permission.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20190728224300__ds-common-entity.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20190809215200__daimon-priority.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20190816173000__generation-cache.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20191203200000__generation-cache-updates.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20191219183702__migrate_feature_extraction_id.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20200109100200__cohort_sample_tables.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20200109132902__fe_domain_null_fix.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20200122173000__insert_cohort_sample_permissions.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20200127101702__restore_rest_endpoint_perms.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20200130124345__fe_analysis_aggregate.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20200325145111__check_required_params.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20200409133802__add-ir-permission-to-atlas-user.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20200413150815__gis_service_api.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20200427161830__modify_user_login.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20200428122109__add_created_by_to_cohort_generation_info.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20200616112935__fe_check_permissions.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20200724121114__daimon-priority-public.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20200811155100__remove-ir-put-permission-from-atlas-user.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20200903120903__drop-cohort-features-columns.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.202010130001__print_friendly_security.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.0.20201022120031__concept_ancestor_and_descendants.sql create mode 100644 
src/main/resources/db/migration/postgresql/V2.8.0.20201103171300__add_source_daimon_unique_constraint.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.1.20210203163300__gis_service_add_check_source_permission.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.1.20210219100459__evidence_get_permission.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.8.1.20210226100460__evidence_get_permission.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.9.0.20210423125133__assets_tags.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.9.0.20210513111520__versioning.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.9.0.20210727101117__achilles_cache.sql create mode 100644 src/main/resources/db/migration/postgresql/V2.9.0.20210812164224__assets_tags_renaming.sql diff --git a/Dockerfile b/Dockerfile index c27b0c9a75..e3d98cc282 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,7 +2,7 @@ FROM maven:3.9-eclipse-temurin-21 AS builder WORKDIR /code -ARG MAVEN_PROFILE=webapi-docker +ARG MAVEN_PROFILE=webapi-docker,tcache ARG MAVEN_PARAMS="" # can use maven options, e.g. -DskipTests=true -DskipUnitTests=true ARG OPENTELEMETRY_JAVA_AGENT_VERSION=1.17.0 @@ -41,10 +41,11 @@ ENV DEFAULT_JAVA_OPTS="-Djava.security.egd=file:///dev/./urandom" # set working directory to a fixed WebAPI directory WORKDIR /var/lib/ohdsi/webapi +RUN apt-get update && apt-get install -y unzip && rm -rf /var/lib/apt/lists/* + COPY --from=builder /code/opentelemetry-javaagent.jar . COPY --from=builder /code/target/WebAPI.jar . -# Extract TrexSQL native library from the nested JAR for proper loading RUN mkdir -p /tmp/trexsql && \ unzip -j WebAPI.jar 'BOOT-INF/lib/trexsql-ext-*.jar' -d /tmp && \ unzip -j /tmp/trexsql-ext-*.jar 'libtrexsql_java.so_linux_amd64' -d /tmp/trexsql 2>/dev/null || true && \ diff --git a/pom.xml b/pom.xml index ae9698a2b5..2a955e8bc1 100644 --- a/pom.xml +++ b/pom.xml @@ -497,6 +497,10 @@ -parameters + + + **/trexsql/** + @@ -1246,11 +1250,6 @@ com.fasterxml.jackson.datatype jackson-datatype-jsr310 - - com.github.p-hoffmann - trexsql-ext - v0.1.11 - @@ -1259,8 +1258,23 @@ true + + + com.github.p-hoffmann + trexsql-ext + v0.1.15 + + + + org.apache.maven.plugins + maven-compiler-plugin + + + + + org.springframework.boot spring-boot-maven-plugin diff --git a/src/main/java/org/ohdsi/webapi/db/migartion/V2_6_0_20180807192421__cohortDetailsHashcodes.java b/src/main/java/org/ohdsi/webapi/db/migartion/V2_6_0_20180807192421__cohortDetailsHashcodes.java new file mode 100644 index 0000000000..422daa9f14 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/db/migartion/V2_6_0_20180807192421__cohortDetailsHashcodes.java @@ -0,0 +1,29 @@ +package org.ohdsi.webapi.db.migartion; + +import com.fasterxml.jackson.core.JsonProcessingException; +import org.ohdsi.webapi.arachne.commons.config.flyway.ApplicationContextAwareSpringMigration; +import java.util.List; +import org.ohdsi.webapi.cohortdefinition.CohortDefinitionDetails; +import org.ohdsi.webapi.cohortdefinition.CohortDefinitionDetailsRepository; + +/** + * Flyway Java migration to update hash codes for cohort definition details. + * + * Note: NOT a @Component - Flyway discovers this via classpath scanning. + * Dependencies are retrieved from ApplicationContext to avoid circular dependency issues. 
+ */ +public class V2_6_0_20180807192421__cohortDetailsHashcodes extends ApplicationContextAwareSpringMigration { + + @Override + public void migrate() throws JsonProcessingException { + // Get repository from Spring ApplicationContext (set by Flyway before migration runs) + CohortDefinitionDetailsRepository detailsRepository = + applicationContext.getBean(CohortDefinitionDetailsRepository.class); + + final List allDetails = detailsRepository.findAll(); + for (CohortDefinitionDetails details: allDetails) { + details.updateHashCode(); + detailsRepository.save(details); + } + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/db/migartion/V2_8_0_20190410103000__migratePathwayResults.java b/src/main/java/org/ohdsi/webapi/db/migartion/V2_8_0_20190410103000__migratePathwayResults.java new file mode 100644 index 0000000000..bc29310f52 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/db/migartion/V2_8_0_20190410103000__migratePathwayResults.java @@ -0,0 +1,181 @@ +package org.ohdsi.webapi.db.migartion; + +import com.fasterxml.jackson.core.JsonProcessingException; +import org.ohdsi.webapi.arachne.commons.config.flyway.ApplicationContextAwareSpringMigration; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import org.json.JSONObject; +import org.ohdsi.circe.helper.ResourceHelper; +import org.ohdsi.sql.SqlRender; +import org.ohdsi.sql.SqlSplit; +import org.ohdsi.sql.SqlTranslate; +import org.ohdsi.webapi.service.AbstractDaoService; +import org.ohdsi.webapi.source.Source; +import org.ohdsi.webapi.source.SourceDaimon; +import org.ohdsi.webapi.source.SourceRepository; +import org.ohdsi.webapi.util.CancelableJdbcTemplate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.env.Environment; +import org.springframework.jdbc.core.PreparedStatementCreator; +import org.springframework.stereotype.Component; +import org.springframework.stereotype.Service; + +import static org.ohdsi.webapi.Constants.Params.GENERATION_ID; + +/** + * Flyway Java migration to migrate pathway analysis results. + * + * Note: NOT a @Component - Flyway discovers this via classpath scanning. + * Dependencies are retrieved from ApplicationContext to avoid circular dependency issues. 
+ */
+public class V2_8_0_20190410103000__migratePathwayResults extends ApplicationContextAwareSpringMigration {
+
+    private final static String SQL_PATH = "/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/";
+    private static final Logger log = LoggerFactory.getLogger(V2_8_0_20190410103000__migratePathwayResults.class);
+
+    @Service
+    public static class MigrationDAO extends AbstractDaoService {
+
+        public void savePathwayCodes(List<Object[]> pathwayCodes, Source source, CancelableJdbcTemplate jdbcTemplate) {
+            String resultsSchema = source.getTableQualifier(SourceDaimon.DaimonType.Results);
+            String[] params;
+            String[] values;
+
+            List creators = new ArrayList<>();
+
+            // clear existing results to prevent double-inserts
+            params = new String[]{"results_schema"};
+            values = new String[]{resultsSchema};
+
+            // delete only codes that belong to the current Atlas instance
+            List<Object[]> executionIdAndCodes = pathwayCodes.stream().map(v -> new Object[]{ v[0], v[1] }).collect(Collectors.toList());
+            String deleteSql = SqlRender.renderSql("DELETE FROM @results_schema.pathway_analysis_codes WHERE pathway_analysis_generation_id = ? AND code = ?", params, values);
+            String translatedSql = SqlTranslate.translateSingleStatementSql(deleteSql, source.getSourceDialect());
+            jdbcTemplate.batchUpdate(translatedSql, executionIdAndCodes);
+
+            String saveCodesSql = SqlRender.renderSql(ResourceHelper.GetResourceAsString(SQL_PATH + "saveCodes.sql"), params, values);
+            saveCodesSql = SqlTranslate.translateSingleStatementSql(saveCodesSql, source.getSourceDialect());
+            jdbcTemplate.batchUpdate(saveCodesSql, pathwayCodes);
+        }
+    }
+
+    private static class EventCohort {
+
+        public int cohortId;
+        public String name;
+    }
+
+    @Override
+    public void migrate() throws JsonProcessingException {
+        // Get beans from Spring ApplicationContext (set by Flyway before migration runs)
+        SourceRepository sourceRepository = applicationContext.getBean(SourceRepository.class);
+        MigrationDAO migrationDAO = applicationContext.getBean(MigrationDAO.class);
+        Environment env = applicationContext.getBean(Environment.class);
+
+        String webAPISchema = env.getProperty("spring.jpa.properties.hibernate.default_schema");
+
+        sourceRepository.findAll().forEach(source -> {
+            try {
+
+                String[] params;
+                String[] values;
+                String translatedSql;
+                String resultsSchema = source.getTableQualifierOrNull(SourceDaimon.DaimonType.Results);
+
+                if (resultsSchema == null) {
+                    return; // no results in this source
+                }
+
+                CancelableJdbcTemplate jdbcTemplate = migrationDAO.getSourceJdbcTemplate(source);
+
+                // step 1: ensure tables are created and have correct columns
+                params = new String[]{"results_schema"};
+                values = new String[]{source.getTableQualifier(SourceDaimon.DaimonType.Results)};
+                String ensureTablesSql = SqlRender.renderSql(ResourceHelper.GetResourceAsString(SQL_PATH + "ensureTables.sql"), params, values);
+                translatedSql = SqlTranslate.translateSql(ensureTablesSql, source.getSourceDialect());
+                Arrays.asList(SqlSplit.splitSql(translatedSql)).forEach(jdbcTemplate::execute);
+
+                // step 2: populate pathway_analysis_paths
+                params = new String[]{"results_schema"};
+                values = new String[]{source.getTableQualifier(SourceDaimon.DaimonType.Results)};
+                String savePathwaysSql = SqlRender.renderSql(ResourceHelper.GetResourceAsString(SQL_PATH + "migratePathwayResults.sql"), params, values);
+
+                translatedSql = SqlTranslate.translateSql(savePathwaysSql, source.getSourceDialect());
+                Arrays.asList(SqlSplit.splitSql(translatedSql)).forEach(jdbcTemplate::execute);
+ + // step 3: populate pathway_analysis_codes from each generated design for the given source + // load the generated designs + params = new String[]{"webapi_schema", "source_id"}; + values = new String[]{webAPISchema, Integer.toString(source.getSourceId())}; + String generatedDesignSql = SqlRender.renderSql(ResourceHelper.GetResourceAsString(SQL_PATH + "getPathwayGeneratedDesigns.sql"), params, values); + translatedSql = SqlTranslate.translateSingleStatementSql(generatedDesignSql, migrationDAO.getDialect()); + + Map> designEventCohorts = migrationDAO.getJdbcTemplate().query(translatedSql, rs -> { + Map> result = new HashMap<>(); + while (rs.next()) { + String design = rs.getString("design"); + JSONObject jsonObject = new JSONObject(design); + // parse design and fetch list of event cohorts + List eventCohorts = jsonObject.getJSONArray("eventCohorts").toList() + .stream().map(obj -> { + Map ecJson = (Map) obj; + EventCohort c = new EventCohort(); + c.name = String.valueOf(ecJson.get("name")); + return c; + }) + .sorted(Comparator.comparing(d -> d.name)) + .collect(Collectors.toList()); + + int index = 0; + for (EventCohort ec : eventCohorts) { + ec.cohortId = (int) Math.pow(2, index++); // assign each cohort an ID based on their name-sort order, as a power of 2 + } + result.put(rs.getLong(GENERATION_ID), eventCohorts); + } + return result; + }); + + //fetch the distinct generation_id, combo_id from the source + params = new String[]{"results_schema"}; + values = new String[]{resultsSchema}; + String distinctGenerationComboIdsSql = SqlRender.renderSql(ResourceHelper.GetResourceAsString(SQL_PATH + "getPathwayGeneratedCodes.sql"), params, values); + translatedSql = SqlTranslate.translateSingleStatementSql(distinctGenerationComboIdsSql, source.getSourceDialect()); + + // retrieve list of generation-comboId-Name-isCombo values + List generatedComboNames = jdbcTemplate.query(translatedSql, (rs) -> { + // values of String[] are: "generation_id", "code", "name", "is_combo" + List result = new ArrayList<>(); + + while (rs.next()) { + Long generationId = rs.getLong("pathway_analysis_generation_id"); + Long comboId = rs.getLong("combo_id"); + + if (!designEventCohorts.containsKey(generationId)) { + continue; // skip this record, since we do not have a design for it + } + List eventCohorts = designEventCohorts.get(generationId); + List comboCohorts = eventCohorts.stream().filter(ec -> (ec.cohortId & comboId) > 0).collect(Collectors.toList()); + String names = comboCohorts.stream() + .map(c -> c.name) + .collect(Collectors.joining(",")); + result.add(new Object[]{generationId, comboId, names, comboCohorts.size() > 1 ? 
1 : 0});
+                    }
+                    return result;
+                });
+
+                migrationDAO.savePathwayCodes(generatedComboNames, source, jdbcTemplate);
+
+            }
+            catch(Exception e) {
+                log.error(String.format("Failed to migrate pathways for source: %s (%s)", source.getSourceName(), source.getSourceKey()));
+            }
+        });
+    }
+}
diff --git a/src/main/java/org/ohdsi/webapi/db/migartion/V2_8_0_20190520171430__cohortExpressionHashCode.java b/src/main/java/org/ohdsi/webapi/db/migartion/V2_8_0_20190520171430__cohortExpressionHashCode.java
new file mode 100644
index 0000000000..25e269f371
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/db/migartion/V2_8_0_20190520171430__cohortExpressionHashCode.java
@@ -0,0 +1,34 @@
+package org.ohdsi.webapi.db.migartion;
+
+import org.ohdsi.webapi.arachne.commons.config.flyway.ApplicationContextAwareSpringMigration;
+import org.ohdsi.analysis.Utils;
+import org.ohdsi.circe.cohortdefinition.CohortExpression;
+import org.ohdsi.webapi.cohortdefinition.CohortDefinitionDetails;
+import org.ohdsi.webapi.cohortdefinition.CohortDefinitionDetailsRepository;
+
+import java.util.List;
+
+/**
+ * Flyway Java migration to update cohort expression hash codes.
+ *
+ * Note: NOT a @Component - Flyway discovers this via classpath scanning.
+ * Dependencies are retrieved from ApplicationContext to avoid circular dependency issues.
+ */
+public class V2_8_0_20190520171430__cohortExpressionHashCode extends ApplicationContextAwareSpringMigration {
+
+    @Override
+    public void migrate() throws Exception {
+        // Get repository from Spring ApplicationContext (set by Flyway before migration runs)
+        CohortDefinitionDetailsRepository detailsRepository =
+                applicationContext.getBean(CohortDefinitionDetailsRepository.class);
+
+        List<CohortDefinitionDetails> allDetails = detailsRepository.findAll();
+        for (CohortDefinitionDetails details: allDetails) {
+            //after deserialization the field "cdmVersionRange" is added and a default value for it is set
+            CohortExpression expression = Utils.deserialize(details.getExpression(), CohortExpression.class);
+            details.setExpression(Utils.serialize(expression));
+            details.updateHashCode();
+            detailsRepository.save(details);
+        }
+    }
+}
diff --git a/src/main/java/org/ohdsi/webapi/db/migartion/V2_8_0_20191106092815__migrateEventFAType.java b/src/main/java/org/ohdsi/webapi/db/migartion/V2_8_0_20191106092815__migrateEventFAType.java
new file mode 100644
index 0000000000..c1fde9fbc6
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/db/migartion/V2_8_0_20191106092815__migrateEventFAType.java
@@ -0,0 +1,70 @@
+package org.ohdsi.webapi.db.migartion;
+
+import org.ohdsi.webapi.arachne.commons.config.flyway.ApplicationContextAwareSpringMigration;
+import org.ohdsi.circe.helper.ResourceHelper;
+import org.ohdsi.sql.SqlRender;
+import org.ohdsi.sql.SqlSplit;
+import org.ohdsi.webapi.service.AbstractDaoService;
+import org.ohdsi.webapi.source.Source;
+import org.ohdsi.webapi.source.SourceDaimon;
+import org.ohdsi.webapi.source.SourceRepository;
+import org.ohdsi.webapi.util.CancelableJdbcTemplate;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+import java.util.List;
+
+/**
+ * Flyway Java migration to migrate event FA type.
+ *
+ * Note: NOT a @Component - Flyway discovers this via classpath scanning.
+ * Dependencies are retrieved from ApplicationContext to avoid circular dependency issues.
+ */ +public class V2_8_0_20191106092815__migrateEventFAType extends ApplicationContextAwareSpringMigration { + private final static String UPDATE_VALUE_SQL = ResourceHelper.GetResourceAsString( + "/db/migration/java/V2_8_0_20191106092815__migrateEventFAType/updateFaType.sql"); + private final static String UPDATE_VALUE_IMPALA_SQL = ResourceHelper.GetResourceAsString( + "/db/migration/java/V2_8_0_20191106092815__migrateEventFAType/updateFaTypeImpala.sql"); + private static final Logger log = LoggerFactory.getLogger(V2_8_0_20191106092815__migrateEventFAType.class); + + @Override + public void migrate() throws Exception { + // Get beans from Spring ApplicationContext (set by Flyway before migration runs) + SourceRepository sourceRepository = applicationContext.getBean(SourceRepository.class); + MigrationDAO migrationDAO = applicationContext.getBean(MigrationDAO.class); + + List sources = sourceRepository.findAll(); + sources.stream() + .filter(source -> source.getTableQualifierOrNull(SourceDaimon.DaimonType.Results) != null) + .forEach(source -> { + try { + CancelableJdbcTemplate jdbcTemplate = migrationDAO.getSourceJdbcTemplate(source); + + migrationDAO.updateColumnValue(source, jdbcTemplate); + } catch (Exception e) { + log.error(String.format("Failed to update fa type value for source: %s (%s)", source.getSourceName(), source.getSourceKey())); + throw e; + } + }); + } + + @Service + public static class MigrationDAO extends AbstractDaoService { + public void updateColumnValue(Source source, CancelableJdbcTemplate jdbcTemplate) { + String resultsSchema = source.getTableQualifier(SourceDaimon.DaimonType.Results); + String[] params = new String[]{"results_schema"}; + String[] values = new String[]{resultsSchema}; + String translatedSql; + // Impala can't update non-kudu tables, so use special script with temp table + if (Source.IMPALA_DATASOURCE.equals(source.getSourceDialect())) { + translatedSql = SqlRender.renderSql(UPDATE_VALUE_IMPALA_SQL, params, values); + } else { + translatedSql = SqlRender.renderSql(UPDATE_VALUE_SQL, params, values); + } + for (String sql: SqlSplit.splitSql(translatedSql)) { + jdbcTemplate.execute(sql); + } + } + } +} diff --git a/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLInstanceManager.java b/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLInstanceManager.java index 5c7ba9c936..f6ed47cfd1 100644 --- a/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLInstanceManager.java +++ b/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLInstanceManager.java @@ -29,18 +29,29 @@ public TrexSQLInstanceManager(TrexSQLConfig config) { this.config = config; } + private volatile boolean initFailed = false; + public Object getInstance() { if (!config.isEnabled()) { throw new IllegalStateException("TrexSQL is not enabled"); } + if (initFailed) { + return null; + } + if (trexsqlDb == null) { initLock.lock(); try { - if (trexsqlDb == null) { + if (trexsqlDb == null && !initFailed) { log.info("Initializing TrexSQL instance"); - trexsqlDb = Trexsql.init(buildConfig()); - log.info("TrexSQL instance initialized successfully"); + try { + trexsqlDb = Trexsql.init(buildConfig()); + log.info("TrexSQL instance initialized successfully"); + } catch (Exception | Error e) { + log.error("Failed to initialize TrexSQL: {}. 
TrexSQL features will be unavailable.", e.getMessage()); + initFailed = true; + } } } finally { initLock.unlock(); diff --git a/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLSearchProvider.java b/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLSearchProvider.java index 842d8b9100..aa97a69edc 100644 --- a/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLSearchProvider.java +++ b/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLSearchProvider.java @@ -34,9 +34,7 @@ public TrexSQLSearchProvider(TrexSQLService trexsqlService, TrexSQLConfig config @Override public boolean supports(String vocabularyVersionKey) { - return config.isEnabled() - && trexsqlService.isEnabledForSource(vocabularyVersionKey) - && trexsqlService.isCacheAvailable(vocabularyVersionKey); + return config.isEnabled(); } @Override @@ -75,7 +73,8 @@ private int parseRows(String rows) { return 1000; } try { - return Integer.parseInt(rows); + int parsed = Integer.parseInt(rows); + return Math.min(parsed, 10000); } catch (NumberFormatException e) { return 1000; } diff --git a/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLService.java b/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLService.java index 7ca6a3a82c..689e8f3d13 100644 --- a/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLService.java +++ b/src/main/java/org/ohdsi/webapi/trexsql/TrexSQLService.java @@ -62,6 +62,8 @@ public List> searchVocab(String sourceKey, String searchTerm Map options = new HashMap<>(); options.put("database-code", databaseCode); options.put("max-rows", maxRows); + String cachePath = config.getCachePath(); + options.put("cache-path", cachePath != null ? cachePath : "/data/cache"); try { Object db = instanceManager.getInstance(); diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 93f9162695..a9179ce7cd 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -30,15 +30,14 @@ cdm.version=${cdm.version} #R Service Host r.serviceHost=${r.serviceHost} -#Flyway database change management (Flyway 11.7 / Spring Boot 3.x properties) #DataSource for Change Managment / Migration spring.flyway.enabled=true spring.flyway.driver-class-name=${datasource.driverClassName} spring.flyway.url=${datasource.url} spring.flyway.user=${datasource.username} spring.flyway.password=${datasource.password} -# CRITICAL: Maintain backward compatibility with Flyway 4.x schema history table name -spring.flyway.table=schema_version +# Flyway schema history table name +spring.flyway.table=flyway_schema_history # check that migration scripts location exists spring.flyway.fail-on-missing-locations=true spring.flyway.locations=${flyway.locations} @@ -51,7 +50,7 @@ spring.flyway.baseline-on-migrate=true #spring.flyway.baseline-version=1.0.0.0 spring.flyway.validate-on-migrate=${flyway.validateOnMigrate} # Enable out of order migrations due to distributed development nature of WebAPI -spring.flyway.out-of-order=true +spring.flyway.out-of-order=false # Flyway Placeholders: spring.flyway.placeholders.ohdsiSchema=${datasource.ohdsi.schema} diff --git a/src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/ensureTables.sql b/src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/ensureTables.sql new file mode 100644 index 0000000000..782c4f1d00 --- /dev/null +++ b/src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/ensureTables.sql @@ -0,0 +1,32 @@ +IF OBJECT_ID('@results_schema.pathway_analysis_codes', 'U') IS NULL 
+CREATE TABLE @results_schema.pathway_analysis_codes +( + pathway_analysis_generation_id BIGINT NOT NULL, + code BIGINT NOT NULL, + name VARCHAR(2000) NOT NULL, + is_combo int NOT NULL +); + +IF OBJECT_ID('@results_schema.pathway_analysis_paths', 'U') IS NULL +CREATE TABLE @results_schema.pathway_analysis_paths +( + pathway_analysis_generation_id BIGINT NOT NULL, + target_cohort_id INTEGER NOT NULL, + step_1 BIGINT, + step_2 BIGINT, + step_3 BIGINT, + step_4 BIGINT, + step_5 BIGINT, + step_6 BIGINT, + step_7 BIGINT, + step_8 BIGINT, + step_9 BIGINT, + step_10 BIGINT, + count_value BIGINT NOT NULL +); + +-- verify tables exist in the correct structure +select pathway_analysis_generation_id, code, name, is_combo from @results_schema.pathway_analysis_codes WHERE 0 = 1; + +select pathway_analysis_generation_id, target_cohort_id, step_1, step_2, step_3, step_4, step_5, step_6, step_7, step_8, step_9, step_10, count_value +FROM @results_schema.pathway_analysis_paths where 0=1; diff --git a/src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/getPathwayGeneratedCodes.sql b/src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/getPathwayGeneratedCodes.sql new file mode 100644 index 0000000000..a42236931c --- /dev/null +++ b/src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/getPathwayGeneratedCodes.sql @@ -0,0 +1 @@ +select distinct pathway_analysis_generation_id, combo_id from @results_schema.pathway_analysis_events; diff --git a/src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/getPathwayGeneratedDesigns.sql b/src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/getPathwayGeneratedDesigns.sql new file mode 100644 index 0000000000..2600997223 --- /dev/null +++ b/src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/getPathwayGeneratedDesigns.sql @@ -0,0 +1,5 @@ +select pg.id as generation_id, ag.design +from @webapi_schema.pathway_analysis_generation pg +join @webapi_schema.analysis_generation_info ag on pg.id = ag.job_execution_id +where status = 'COMPLETED' + and source_id = @source_id; diff --git a/src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/migratePathwayResults.sql b/src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/migratePathwayResults.sql new file mode 100644 index 0000000000..7775953a9d --- /dev/null +++ b/src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/migratePathwayResults.sql @@ -0,0 +1,25 @@ +TRUNCATE TABLE @results_schema.pathway_analysis_paths; + +INSERT INTO @results_schema.pathway_analysis_paths (pathway_analysis_generation_id, target_cohort_id, step_1, step_2, step_3, step_4, step_5, step_6, step_7, step_8, step_9, step_10, count_value) +select pathway_analysis_generation_id, target_cohort_id, + step_1, step_2, step_3, step_4, step_5, step_6, step_7, step_8, step_9, step_10, + count_big(subject_id) as count_value +from +( + select e.pathway_analysis_generation_id, e.target_cohort_id, e.subject_id, + MAX(case when ordinal = 1 then combo_id end) as step_1, + MAX(case when ordinal = 2 then combo_id end) as step_2, + MAX(case when ordinal = 3 then combo_id end) as step_3, + MAX(case when ordinal = 4 then combo_id end) as step_4, + MAX(case when ordinal = 5 then combo_id end) as step_5, + MAX(case when ordinal = 6 then combo_id end) as step_6, + MAX(case when ordinal = 7 then combo_id end) as step_7, + 
MAX(case when ordinal = 8 then combo_id end) as step_8, + MAX(case when ordinal = 9 then combo_id end) as step_9, + MAX(case when ordinal = 10 then combo_id end) as step_10 + from @results_schema.pathway_analysis_events e + GROUP BY e.pathway_analysis_generation_id, e.target_cohort_id, e.subject_id +) t1 +group by pathway_analysis_generation_id, target_cohort_id, + step_1, step_2, step_3, step_4, step_5, step_6, step_7, step_8, step_9, step_10 +; diff --git a/src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/saveCodes.sql b/src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/saveCodes.sql new file mode 100644 index 0000000000..f135c04872 --- /dev/null +++ b/src/main/resources/db/migration/java/V2_8_0_20190410103000__migratePathwayResults/saveCodes.sql @@ -0,0 +1,2 @@ +INSERT INTO @results_schema.pathway_analysis_codes (pathway_analysis_generation_id, code, name, is_combo) +SELECT ? as pathway_analysis_generation_id,? as code, CAST(? AS VARCHAR(2000)) as name,? as is_combo; \ No newline at end of file diff --git a/src/main/resources/db/migration/java/V2_8_0_20191106092815__migrateEventFAType/updateFaType.sql b/src/main/resources/db/migration/java/V2_8_0_20191106092815__migrateEventFAType/updateFaType.sql new file mode 100644 index 0000000000..44c6b53b8d --- /dev/null +++ b/src/main/resources/db/migration/java/V2_8_0_20191106092815__migrateEventFAType/updateFaType.sql @@ -0,0 +1,3 @@ +UPDATE @results_schema.cc_results +SET fa_type = 'CRITERIA_SET' +WHERE fa_type = 'CRITERIA'; diff --git a/src/main/resources/db/migration/java/V2_8_0_20191106092815__migrateEventFAType/updateFaTypeImpala.sql b/src/main/resources/db/migration/java/V2_8_0_20191106092815__migrateEventFAType/updateFaTypeImpala.sql new file mode 100644 index 0000000000..5664358e6a --- /dev/null +++ b/src/main/resources/db/migration/java/V2_8_0_20191106092815__migrateEventFAType/updateFaTypeImpala.sql @@ -0,0 +1,8 @@ +CREATE TABLE @results_schema.cc_results_temp LIKE @results_schema.cc_results; + +INSERT INTO TABLE @results_schema.cc_results_temp SELECT type, cast(case fa_type when 'CRITERIA' then 'CRITERIA_SET' else fa_type end as VARCHAR(255)), cc_generation_id, analysis_id, analysis_name, covariate_id, covariate_name, strata_id, strata_name, time_window, concept_id, count_value, avg_value, stdev_value, min_value, p10_value, p25_value, median_value, p75_value, p90_value, max_value, cohort_definition_id +FROM @results_schema.cc_results; + +DROP TABLE @results_schema.cc_results; + +ALTER TABLE @results_schema.cc_results_temp RENAME TO @results_schema.cc_results; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.11.0.20211012325133__assets_tags_add_endpoints.sql b/src/main/resources/db/migration/postgresql/V2.11.0.20211012325133__assets_tags_add_endpoints.sql new file mode 100644 index 0000000000..cb0abe86de --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.11.0.20211012325133__assets_tags_add_endpoints.sql @@ -0,0 +1,26 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:byTags:post', + 'Get cohort definitions with certain assigned tags'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:byTags:post', + 'Get concept sets with certain assigned tags'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:byTags:post', + 'Get cohort characterizations with certain assigned tags'), + 
(NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'ir:byTags:post', + 'Get incidence rates with certain assigned tags'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:byTags:post', + 'Get pathways with certain assigned tags'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'reusable:byTags:post', + 'Get reusables with certain assigned tags'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, + ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'cohortdefinition:byTags:post', + 'conceptset:byTags:post', + 'cohort-characterization:byTags:post', + 'ir:byTags:post', + 'pathway-analysis:byTags:post', + 'reusable:byTags:post') + AND sr.name IN ('Atlas users'); diff --git a/src/main/resources/db/migration/postgresql/V2.11.0.20211101134413__reusables.sql b/src/main/resources/db/migration/postgresql/V2.11.0.20211101134413__reusables.sql new file mode 100644 index 0000000000..546a44a660 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.11.0.20211101134413__reusables.sql @@ -0,0 +1,129 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'reusable:get', 'List reusable'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'reusable:post', 'Create reusable'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'reusable:*:exists:get', 'Check name uniqueness of reusable'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'reusable:*:put', 'Update reusable'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'reusable:*:post', 'Copy reusable'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'reusable:*:get', 'Get reusable'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'reusable:*:delete', 'Delete reusable'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, + ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'reusable:get', + 'reusable:post', + 'reusable:*:post', + 'reusable:*:exists:get', + 'reusable:*:get') + AND sr.name IN ('Atlas users'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, + ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'reusable:*:put', 'reusable:*:delete' + ) AND sr.name IN ('Moderator'); + +CREATE SEQUENCE ${ohdsiSchema}.reusable_seq; + +CREATE TABLE ${ohdsiSchema}.reusable +( + id int4 NOT NULL DEFAULT nextval('${ohdsiSchema}.reusable_seq'), + name VARCHAR NOT NULL, + description varchar NULL, + data text NOT NULL, + created_by_id INTEGER, + created_date TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT (now()), + modified_by_id INTEGER, + modified_date TIMESTAMP WITH TIME ZONE, + CONSTRAINT pk_reusable_id PRIMARY KEY (id), + CONSTRAINT fk_reusable_sec_user_creator FOREIGN KEY (created_by_id) REFERENCES ${ohdsiSchema}.sec_user (id), + CONSTRAINT fk_reusable_sec_user_updater FOREIGN KEY (modified_by_id) REFERENCES ${ohdsiSchema}.sec_user (id) +); + +CREATE UNIQUE INDEX reusable_name_idx ON ${ohdsiSchema}.reusable USING btree (LOWER(name)); + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'reusable:*:tag:post', + 'Assign tag to reusable'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'reusable:*:tag:*:delete', + 'Unassign tag from reusable'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 
'reusable:*:protectedtag:post', + 'Assign tag to reusable'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'reusable:*:protectedtag:*:delete', + 'Unassign tag from reusable'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, + ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'reusable:*:tag:post', + 'reusable:*:tag:*:delete') + AND sr.name IN ('Atlas users'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, + ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'reusable:*:protectedtag:post', + 'reusable:*:protectedtag:*:delete') + AND sr.name IN ('admin'); + +CREATE TABLE ${ohdsiSchema}.reusable_tag +( + asset_id int4 NOT NULL, + tag_id int4 NOT NULL, + CONSTRAINT pk_reusable_tag_id PRIMARY KEY (asset_id, tag_id), + CONSTRAINT reusable_tag_fk_reusable FOREIGN KEY (asset_id) REFERENCES ${ohdsiSchema}.reusable (id) ON DELETE CASCADE, + CONSTRAINT reusable_tag_fk_tag FOREIGN KEY (tag_id) REFERENCES ${ohdsiSchema}.tag (id) ON DELETE CASCADE +); + +CREATE INDEX reusable_tag_reusableidx ON ${ohdsiSchema}.reusable_tag USING btree (asset_id); +CREATE INDEX reusable_tag_tag_id_idx ON ${ohdsiSchema}.reusable_tag USING btree (tag_id); + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'reusable:*:version:get', + 'Get list of reusables versions'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'reusable:*:version:*:get', + 'Get reusable version'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'reusable:*:version:*:put', + 'Update reusable version info'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'reusable:*:version:*:delete', + 'Delete reusable version info'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'reusable:*:version:*:createAsset:put', + 'Copy reusable version as new reusable'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, + ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'reusable:*:version:get', + 'reusable:*:version:*:get', + 'reusable:*:version:*:put', + 'reusable:*:version:*:delete', + 'reusable:*:version:*:createAsset:put') + AND sr.name IN ('Atlas users'); + +-- Reusables +CREATE TABLE ${ohdsiSchema}.reusable_version +( + asset_id int8 NOT NULL, + comment varchar NULL, + description varchar NULL, + version int4 NOT NULL DEFAULT 1, + asset_json varchar NOT NULL, + archived bool NOT NULL DEFAULT FALSE, + created_by_id INTEGER, + created_date TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT (now()), + CONSTRAINT pk_reusable_version_id PRIMARY KEY (asset_id, version), + CONSTRAINT fk_reusable_version_sec_user_creator FOREIGN KEY (created_by_id) REFERENCES ${ohdsiSchema}.sec_user (id), + CONSTRAINT fk_reusable_version_asset_id FOREIGN KEY (asset_id) REFERENCES ${ohdsiSchema}.reusable (id) ON DELETE CASCADE +); + +CREATE INDEX reusable_version_asset_idx ON ${ohdsiSchema}.reusable_version USING btree (asset_id); diff --git a/src/main/resources/db/migration/postgresql/V2.11.0.20211109155216__cdm_cache.sql b/src/main/resources/db/migration/postgresql/V2.11.0.20211109155216__cdm_cache.sql new file mode 100644 index 0000000000..1374879914 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.11.0.20211109155216__cdm_cache.sql @@ -0,0 +1,16 @@ +CREATE SEQUENCE ${ohdsiSchema}.cdm_cache_seq; + +CREATE TABLE 
${ohdsiSchema}.cdm_cache +( + id int8 NOT NULL DEFAULT nextval('${ohdsiSchema}.cdm_cache_seq'), + concept_id int4 NOT NULL, + source_id int4 NOT NULL, + record_count int8 NULL, + descendant_record_count int8 NULL, + person_count int8 NULL, + descendant_person_count int8 NULL, + CONSTRAINT cdm_cache_pk PRIMARY KEY (id), + CONSTRAINT cdm_cache_un UNIQUE (concept_id, source_id), + CONSTRAINT cdm_cache_fk FOREIGN KEY (source_id) REFERENCES ${ohdsiSchema}.source (source_id) ON DELETE CASCADE +); +CREATE INDEX cdm_cache_concept_id_idx ON ${ohdsiSchema}.cdm_cache USING btree (concept_id, source_id); diff --git a/src/main/resources/db/migration/postgresql/V2.11.0.20220218125000__add_permission_for_checkv2.sql b/src/main/resources/db/migration/postgresql/V2.11.0.20220218125000__add_permission_for_checkv2.sql new file mode 100644 index 0000000000..d953e72c73 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.11.0.20220218125000__add_permission_for_checkv2.sql @@ -0,0 +1,9 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:checkv2:post', 'Run diagnostics for cohort definition with tags'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'cohortdefinition:checkv2:post' + ) AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.12.0.20220710161100__add_tagging_mass_assign_permission.sql b/src/main/resources/db/migration/postgresql/V2.12.0.20220710161100__add_tagging_mass_assign_permission.sql new file mode 100644 index 0000000000..f58c8815f3 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.12.0.20220710161100__add_tagging_mass_assign_permission.sql @@ -0,0 +1,12 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'tag:multiAssign:post', 'Tags multi-assign permission'); +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'tag:multiUnassign:post', 'Tags multi-unassign permission'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'tag:multiAssign:post', + 'tag:multiUnassign:post' + ) AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.12.0.20220719110154__add_description_to_assets.sql b/src/main/resources/db/migration/postgresql/V2.12.0.20220719110154__add_description_to_assets.sql new file mode 100644 index 0000000000..af997b4777 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.12.0.20220719110154__add_description_to_assets.sql @@ -0,0 +1,3 @@ +ALTER TABLE ${ohdsiSchema}.pathway_analysis ADD description varchar(1000) NULL; +ALTER TABLE ${ohdsiSchema}.concept_set ADD description varchar(1000) NULL; +ALTER TABLE ${ohdsiSchema}.cohort_characterization ADD description varchar(1000) NULL; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.12.0.202208110000__alter_source_cache_enable.sql 
b/src/main/resources/db/migration/postgresql/V2.12.0.202208110000__alter_source_cache_enable.sql new file mode 100644 index 0000000000..9f1c4edd16 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.12.0.202208110000__alter_source_cache_enable.sql @@ -0,0 +1,3 @@ +ALTER TABLE ${ohdsiSchema}.source ADD COLUMN is_cache_enabled boolean; +UPDATE ${ohdsiSchema}.source set is_cache_enabled = true; +ALTER TABLE ${ohdsiSchema}.source ALTER COLUMN is_cache_enabled SET NOT NULL; diff --git a/src/main/resources/db/migration/postgresql/V2.12.0.202208240001__concept_recommend_perms.sql b/src/main/resources/db/migration/postgresql/V2.12.0.202208240001__concept_recommend_perms.sql new file mode 100644 index 0000000000..ca4705cc62 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.12.0.202208240001__concept_recommend_perms.sql @@ -0,0 +1,24 @@ +CREATE TEMP TABLE temp_migration ( + from_perm_id int, + new_value character varying(255) +); + +INSERT INTO temp_migration (from_perm_id, new_value) +SELECT sp.id as from_id, + REPLACE('vocabulary:%s:lookup:recommended:post', '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')) as new_value +FROM ${ohdsiSchema}.sec_permission sp +WHERE sp.value LIKE 'source:%:access'; + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), new_value +FROM temp_migration; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (id,role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), + srp.role_id, + sp.id as permission_id +FROM temp_migration m +JOIN ${ohdsiSchema}.sec_permission sp on m.new_value = sp.value +JOIN ${ohdsiSchema}.sec_role_permission srp on m.from_perm_id = srp.permission_id; + +DROP TABLE temp_migration; diff --git a/src/main/resources/db/migration/postgresql/V2.12.0.20221102113317__user_originb.sql b/src/main/resources/db/migration/postgresql/V2.12.0.20221102113317__user_originb.sql new file mode 100644 index 0000000000..4fbeac9481 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.12.0.20221102113317__user_originb.sql @@ -0,0 +1,9 @@ +ALTER TABLE ${ohdsiSchema}.sec_user ADD origin varchar(32) NULL; +UPDATE ${ohdsiSchema}.sec_user SET origin='SYSTEM'; +ALTER TABLE ${ohdsiSchema}.sec_user ALTER COLUMN origin SET NOT NULL; +ALTER TABLE ${ohdsiSchema}.sec_user ALTER COLUMN origin SET DEFAULT 'SYSTEM'; + +ALTER TABLE ${ohdsiSchema}.sec_user_role ADD origin varchar(32) NULL; +UPDATE ${ohdsiSchema}.sec_user_role SET origin='SYSTEM'; +ALTER TABLE ${ohdsiSchema}.sec_user_role ALTER COLUMN origin SET NOT NULL; +ALTER TABLE ${ohdsiSchema}.sec_user_role ALTER COLUMN origin SET DEFAULT 'SYSTEM'; diff --git a/src/main/resources/db/migration/postgresql/V2.12.1.202210120000__alter_source_set_cache_enabled_default.sql b/src/main/resources/db/migration/postgresql/V2.12.1.202210120000__alter_source_set_cache_enabled_default.sql new file mode 100644 index 0000000000..622ffcc296 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.12.1.202210120000__alter_source_set_cache_enabled_default.sql @@ -0,0 +1 @@ +ALTER TABLE ${ohdsiSchema}.source ALTER COLUMN is_cache_enabled SET DEFAULT false; diff --git a/src/main/resources/db/migration/postgresql/V2.13.0.20221024175000__add_tag_management_permission.sql b/src/main/resources/db/migration/postgresql/V2.13.0.20221024175000__add_tag_management_permission.sql new file mode 100644 index 0000000000..5814c7250a --- /dev/null +++ 
b/src/main/resources/db/migration/postgresql/V2.13.0.20221024175000__add_tag_management_permission.sql @@ -0,0 +1,9 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'tag:management', 'Permission to manage tags'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'tag:management' + ) AND sr.name IN ('admin'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.13.0.20221027170000__add_concept_set_csv_comparison_permission.sql b/src/main/resources/db/migration/postgresql/V2.13.0.20221027170000__add_concept_set_csv_comparison_permission.sql new file mode 100644 index 0000000000..b3469d5227 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.13.0.20221027170000__add_concept_set_csv_comparison_permission.sql @@ -0,0 +1,7 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'vocabulary:*:compare-arbitrary', 'Concept sets comparison permission (compare-arbitrary method)'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value = 'vocabulary:*:compare-arbitrary' AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.13.1.20230524160057__add_export_conceptset_permissions.sql b/src/main/resources/db/migration/postgresql/V2.13.1.20230524160057__add_export_conceptset_permissions.sql new file mode 100644 index 0000000000..f641ab048a --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.13.1.20230524160057__add_export_conceptset_permissions.sql @@ -0,0 +1,20 @@ +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), + 'feature-analysis:*:export:conceptset:get', + 'Get archive with Feature Analysis Concept Sets'; + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), + 'cohort-characterization:*:export:conceptset:get', + 'Get archive with Cohort Characterization Concept Sets'; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission sp, + ${ohdsiSchema}.sec_role sr +WHERE sp."value" in + ( + 'feature-analysis:*:export:conceptset:get', + 'cohort-characterization:*:export:conceptset:get' + ) + AND sr.name IN ('Atlas users'); diff --git a/src/main/resources/db/migration/postgresql/V2.2.0.20180202143000__delete-unnecessary-admin-permissions.sql b/src/main/resources/db/migration/postgresql/V2.2.0.20180202143000__delete-unnecessary-admin-permissions.sql new file mode 100644 index 0000000000..0da36df8c6 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.2.0.20180202143000__delete-unnecessary-admin-permissions.sql @@ -0,0 +1 @@ +DELETE FROM ${ohdsiSchema}.sec_role_permission WHERE role_id = 2 AND permission_id BETWEEN 34 AND 44; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.2.0.20180215143000__remove_password.sql b/src/main/resources/db/migration/postgresql/V2.2.0.20180215143000__remove_password.sql 
new file mode 100644 index 0000000000..6b99dd4c0e --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.2.0.20180215143000__remove_password.sql @@ -0,0 +1,2 @@ +ALTER TABLE ${ohdsiSchema}.sec_user DROP COLUMN password; +ALTER TABLE ${ohdsiSchema}.sec_user DROP COLUMN salt; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.2.5.20180212152023__concept-sets-author.sql b/src/main/resources/db/migration/postgresql/V2.2.5.20180212152023__concept-sets-author.sql new file mode 100644 index 0000000000..2a981bad4f --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.2.5.20180212152023__concept-sets-author.sql @@ -0,0 +1,5 @@ +ALTER TABLE ${ohdsiSchema}.concept_set + ADD COLUMN created_by VARCHAR(255), + ADD COLUMN modified_by VARCHAR(255), + ADD COLUMN created_date TIMESTAMP WITH TIME ZONE, + ADD COLUMN modified_date TIMESTAMP WITH TIME ZONE; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.2.5.20180212181325__cca-author.sql b/src/main/resources/db/migration/postgresql/V2.2.5.20180212181325__cca-author.sql new file mode 100644 index 0000000000..760354b778 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.2.5.20180212181325__cca-author.sql @@ -0,0 +1,3 @@ +ALTER TABLE ${ohdsiSchema}.cca + ADD COLUMN created_by VARCHAR(255), + ADD COLUMN modified_by VARCHAR(255); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.2.5.20180215105415__separate-sequences.sql b/src/main/resources/db/migration/postgresql/V2.2.5.20180215105415__separate-sequences.sql new file mode 100644 index 0000000000..ad392651fb --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.2.5.20180215105415__separate-sequences.sql @@ -0,0 +1,19 @@ +CREATE SEQUENCE ${ohdsiSchema}.cca_sequence; +SELECT setval('${ohdsiSchema}.cca_sequence', coalesce(max(cca_id), 1)) FROM ${ohdsiSchema}.cca; + +CREATE SEQUENCE ${ohdsiSchema}.cohort_definition_sequence; +SELECT setval('${ohdsiSchema}.cohort_definition_sequence', coalesce(max(id), 1)) FROM ${ohdsiSchema}.cohort_definition; + +CREATE SEQUENCE ${ohdsiSchema}.feasibility_study_sequence; +SELECT setval('${ohdsiSchema}.feasibility_study_sequence', coalesce(max(id), 1)) FROM ${ohdsiSchema}.feasibility_study; + +CREATE SEQUENCE ${ohdsiSchema}.ir_analysis_sequence; +SELECT setval('${ohdsiSchema}.ir_analysis_sequence', coalesce(max(id), 1)) FROM ${ohdsiSchema}.ir_analysis; + +CREATE SEQUENCE ${ohdsiSchema}.plp_sequence; +SELECT setval('${ohdsiSchema}.plp_sequence', coalesce(max(plp_id), 1)) FROM ${ohdsiSchema}.plp; + +CREATE SEQUENCE ${ohdsiSchema}.negative_controls_sequence; +SELECT setval('${ohdsiSchema}.negative_controls_sequence', coalesce(max(id), 1)) FROM ${ohdsiSchema}.concept_set_negative_controls; +ALTER TABLE ${ohdsiSchema}.CONCEPT_SET_NEGATIVE_CONTROLS ALTER COLUMN id SET DEFAULT nextval('${ohdsiSchema}.negative_controls_sequence'); +DROP SEQUENCE IF EXISTS ${ohdsiSchema}.CONCEPT_SET_NEGATIVE_CONTROLS_SEQUENCE; diff --git a/src/main/resources/db/migration/postgresql/V2.2.6.20180215152023__source_key_unique.sql b/src/main/resources/db/migration/postgresql/V2.2.6.20180215152023__source_key_unique.sql new file mode 100644 index 0000000000..fecde49754 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.2.6.20180215152023__source_key_unique.sql @@ -0,0 +1 @@ +alter table ${ohdsiSchema}.source ADD CONSTRAINT source_key_unique UNIQUE (source_key); \ No newline at end of file diff --git 
a/src/main/resources/db/migration/postgresql/V2.3.0.20180302143300__negative_control_redo.sql b/src/main/resources/db/migration/postgresql/V2.3.0.20180302143300__negative_control_redo.sql new file mode 100644 index 0000000000..cdb926938b --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.3.0.20180302143300__negative_control_redo.sql @@ -0,0 +1,37 @@ +DROP TABLE IF EXISTS ${ohdsiSchema}.CONCEPT_SET_NEGATIVE_CONTROLS; +DROP SEQUENCE IF EXISTS ${ohdsiSchema}.negative_controls_sequence; +CREATE SEQUENCE ${ohdsiSchema}.negative_controls_sequence; +CREATE TABLE ${ohdsiSchema}.CONCEPT_SET_NEGATIVE_CONTROLS ( + id INTEGER NOT NULL DEFAULT NEXTVAL('negative_controls_sequence'), + evidence_job_id BIGINT NOT NULL, + source_id INTEGER NOT NULL, + concept_set_id INTEGER NOT NULL, + concept_set_name varchar(255) NOT NULL, + negative_control INTEGER NOT NULL, + concept_id INTEGER NOT NULL, + concept_name varchar(255) NOT NULL, + domain_id varchar(255) NOT NULL, + sort_order bigint, + descendant_pmid_cnt BIGINT, + exact_pmid_cnt BIGINT, + parent_pmid_cnt BIGINT, + ancestor_pmid_cnt BIGINT, + ind_ci INTEGER, + too_broad INTEGER, + drug_induced INTEGER, + pregnancy INTEGER, + descendant_splicer_cnt BIGINT, + exact_splicer_cnt BIGINT, + parent_splicer_cnt BIGINT, + ancestor_splicer_cnt BIGINT, + descendant_faers_cnt BIGINT, + exact_faers_cnt BIGINT, + parent_faers_cnt BIGINT, + ancestor_faers_cnt BIGINT, + user_excluded INTEGER, + user_included INTEGER, + optimized_out INTEGER, + not_prevalent INTEGER, + CONSTRAINT PK_CONCEPT_SET_NC PRIMARY KEY (id) +); + diff --git a/src/main/resources/db/migration/postgresql/V2.3.0.20180302143400__alter_cs_gen_info.sql b/src/main/resources/db/migration/postgresql/V2.3.0.20180302143400__alter_cs_gen_info.sql new file mode 100644 index 0000000000..e70c25cac6 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.3.0.20180302143400__alter_cs_gen_info.sql @@ -0,0 +1,9 @@ +ALTER TABLE ${ohdsiSchema}.concept_set_generation_info + ADD COLUMN params text +; + +UPDATE ${ohdsiSchema}.concept_set_generation_info set params = '{}' +; + +ALTER TABLE ${ohdsiSchema}.concept_set_generation_info + ALTER COLUMN params SET NOT NULL; diff --git a/src/main/resources/db/migration/postgresql/V2.3.0.20180330124512__add_source_permissions.sql b/src/main/resources/db/migration/postgresql/V2.3.0.20180330124512__add_source_permissions.sql new file mode 100644 index 0000000000..1a53ec6231 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.3.0.20180330124512__add_source_permissions.sql @@ -0,0 +1,40 @@ +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) + SELECT + nextval('${ohdsiSchema}.sec_permission_id_seq'), + 'source:*:daimons:*:set-priority:post', + 'Set priority daimons'; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT + sr.id, + sp.id + FROM ${ohdsiSchema}.sec_permission sp, + ${ohdsiSchema}.sec_role sr + WHERE sp."value" = 'source:*:daimons:*:set-priority:post' + AND sr.name IN ('admin'); + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES(nextval('${ohdsiSchema}.sec_permission_id_seq'), 'source:post', 'Create source'); +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, + ${ohdsiSchema}.sec_role sr + WHERE sp."value" = 'source:post' AND sr.name IN ('admin'); + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES(nextval('${ohdsiSchema}.sec_permission_id_seq'), 
'source:*:put', 'Edit source'); +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, + ${ohdsiSchema}.sec_role sr + WHERE sp."value" = 'source:*:put' AND sr.name IN ('admin'); + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES(nextval('${ohdsiSchema}.sec_permission_id_seq'), 'source:*:delete', 'Delete source'); +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr + WHERE sp."value" = 'source:*:delete' AND sr.name IN ('admin'); + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES(nextval('${ohdsiSchema}.sec_permission_id_seq'), 'source:details:*:get', 'Read source details'); +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr + WHERE sp."value" = 'source:details:*:get' AND sr.name IN ('admin'); diff --git a/src/main/resources/db/migration/postgresql/V2.3.0.20180405164306__add_profile_permission.sql b/src/main/resources/db/migration/postgresql/V2.3.0.20180405164306__add_profile_permission.sql new file mode 100644 index 0000000000..2b0c465891 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.3.0.20180405164306__add_profile_permission.sql @@ -0,0 +1,14 @@ +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) + SELECT + nextval('${ohdsiSchema}.sec_permission_id_seq'), + '*:person:*:get:dates', + 'View calendar dates on person profiles'; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT + sr.id, + sp.id + FROM ${ohdsiSchema}.sec_permission sp, + ${ohdsiSchema}.sec_role sr + WHERE sp."value" = '*:person:*:get:dates' + AND sr.name IN ('admin', 'Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.3.0.20180412000000__increment_sequences.sql b/src/main/resources/db/migration/postgresql/V2.3.0.20180412000000__increment_sequences.sql new file mode 100644 index 0000000000..af1dfa7030 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.3.0.20180412000000__increment_sequences.sql @@ -0,0 +1,16 @@ +-- cca_sequence +SELECT setval('${ohdsiSchema}.cca_sequence', coalesce(max(cca_id) + 1, 1)) FROM ${ohdsiSchema}.cca; +-- cohort_definition_sequence +SELECT setval('${ohdsiSchema}.cohort_definition_sequence', coalesce(max(id) + 1, 1)) FROM ${ohdsiSchema}.cohort_definition; +-- concept_set_item_sequence +SELECT setval('${ohdsiSchema}.concept_set_item_sequence', coalesce(max(concept_set_item_id) + 1, 1)) FROM ${ohdsiSchema}.concept_set_item; +-- concept_set_sequence +SELECT setval('${ohdsiSchema}.concept_set_sequence', coalesce(max(concept_set_id) + 1, 1)) FROM ${ohdsiSchema}.concept_set; +-- feasibility_study_sequence +SELECT setval('${ohdsiSchema}.feasibility_study_sequence', coalesce(max(id) + 1, 1)) FROM ${ohdsiSchema}.feasibility_study; +-- ir_analysis_sequence +SELECT setval('${ohdsiSchema}.ir_analysis_sequence', coalesce(max(id) + 1, 1)) FROM ${ohdsiSchema}.ir_analysis; +-- negative_controls_sequence +SELECT setval('${ohdsiSchema}.negative_controls_sequence', coalesce(max(id) + 1, 1)) FROM ${ohdsiSchema}.concept_set_negative_controls; +-- plp_sequence +SELECT setval('${ohdsiSchema}.plp_sequence', coalesce(max(plp_id) + 1, 1)) FROM ${ohdsiSchema}.plp; \ No newline at end of file diff --git 
a/src/main/resources/db/migration/postgresql/V2.3.0.20180412000001__constraints.sql b/src/main/resources/db/migration/postgresql/V2.3.0.20180412000001__constraints.sql new file mode 100644 index 0000000000..6a724d6ec1 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.3.0.20180412000001__constraints.sql @@ -0,0 +1,8 @@ +-- Add PK to cca table +ALTER TABLE ${ohdsiSchema}.cca ADD CONSTRAINT PK_cca_cca_id PRIMARY KEY (cca_id); + +-- Add PK to concept_set_item +ALTER TABLE ${ohdsiSchema}.concept_set_item ADD CONSTRAINT PK_concept_set_item PRIMARY KEY (concept_set_item_id); + +-- Add PK to plp table +ALTER TABLE ${ohdsiSchema}.plp ADD CONSTRAINT PK_plp_plp_id PRIMARY KEY (plp_id); diff --git a/src/main/resources/db/migration/postgresql/V2.3.0.20180425185900__cohort_analysis_generation_info.sql b/src/main/resources/db/migration/postgresql/V2.3.0.20180425185900__cohort_analysis_generation_info.sql new file mode 100644 index 0000000000..db0ab395ed --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.3.0.20180425185900__cohort_analysis_generation_info.sql @@ -0,0 +1,16 @@ +CREATE TABLE ${ohdsiSchema}.cohort_analysis_gen_info ( + source_id int NOT NULL, + cohort_id int NOT NULL, + last_execution Timestamp(3), + execution_duration int, + fail_message varchar(2000), + PRIMARY KEY (source_id, cohort_id) +); + +CREATE TABLE ${ohdsiSchema}.cohort_analysis_list_xref ( + source_id int, + cohort_id int, + analysis_id int); + +ALTER TABLE ${ohdsiSchema}.cohort_analysis_gen_info ADD CONSTRAINT FK_cagi_cohort_id FOREIGN KEY (cohort_id) REFERENCES ${ohdsiSchema}.cohort_definition (ID); +ALTER TABLE ${ohdsiSchema}.cohort_analysis_list_xref ADD CONSTRAINT FK_calx_source_id FOREIGN KEY (source_id, cohort_id) REFERENCES ${ohdsiSchema}.cohort_analysis_gen_info (source_id, cohort_id); diff --git a/src/main/resources/db/migration/postgresql/V2.3.0.20180427114800__sec_user_unique.sql b/src/main/resources/db/migration/postgresql/V2.3.0.20180427114800__sec_user_unique.sql new file mode 100644 index 0000000000..f215c8cc82 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.3.0.20180427114800__sec_user_unique.sql @@ -0,0 +1 @@ +ALTER TABLE ${ohdsiSchema}.sec_user ADD CONSTRAINT sec_user_login_unique UNIQUE (login); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.4.0.20180508090000__source-credentials.sql b/src/main/resources/db/migration/postgresql/V2.4.0.20180508090000__source-credentials.sql new file mode 100644 index 0000000000..39f9364e43 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.4.0.20180508090000__source-credentials.sql @@ -0,0 +1,2 @@ +ALTER TABLE ${ohdsiSchema}.source ADD COLUMN username VARCHAR(255); +ALTER TABLE ${ohdsiSchema}.source ADD COLUMN password VARCHAR(255); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.4.0.20180516223100__roles-unique.sql b/src/main/resources/db/migration/postgresql/V2.4.0.20180516223100__roles-unique.sql new file mode 100644 index 0000000000..9f7b571325 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.4.0.20180516223100__roles-unique.sql @@ -0,0 +1 @@ +alter table ${ohdsiSchema}.sec_role ADD CONSTRAINT sec_role_name_uq UNIQUE (name); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.4.3.20180619113700__permission_for_my_user.sql b/src/main/resources/db/migration/postgresql/V2.4.3.20180619113700__permission_for_my_user.sql new file mode 100644 index 0000000000..37666ffee8 --- /dev/null +++ 
b/src/main/resources/db/migration/postgresql/V2.4.3.20180619113700__permission_for_my_user.sql @@ -0,0 +1,14 @@ +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) + SELECT + nextval('${ohdsiSchema}.sec_permission_id_seq'), + 'user:me:get', + 'Get info about current user (myself)'; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT + sr.id, + sp.id + FROM ${ohdsiSchema}.sec_permission sp, + ${ohdsiSchema}.sec_role sr + WHERE sp."value" = 'user:me:get' + AND sr.name IN ('public'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.4.3.20180702202700__permission_for_ir_execution.sql b/src/main/resources/db/migration/postgresql/V2.4.3.20180702202700__permission_for_ir_execution.sql new file mode 100644 index 0000000000..da59bfba0c --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.4.3.20180702202700__permission_for_ir_execution.sql @@ -0,0 +1,9 @@ +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) + SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), + 'ir:*:execute:*:get', + 'Execute Incidence Rate job'; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr + WHERE sp."value" = 'ir:*:execute:*:get' AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.4.3.20180703144901__permission_for_evidence.sql b/src/main/resources/db/migration/postgresql/V2.4.3.20180703144901__permission_for_evidence.sql new file mode 100644 index 0000000000..9985b06c25 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.4.3.20180703144901__permission_for_evidence.sql @@ -0,0 +1,9 @@ +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) + SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), + 'evidence:*:negativecontrols:post', + 'Execute evidence job'; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr + WHERE sp."value" = 'evidence:*:negativecontrols:post' AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.5.0.20180608182403__schema-add-analysis-gen-progress.sql b/src/main/resources/db/migration/postgresql/V2.5.0.20180608182403__schema-add-analysis-gen-progress.sql new file mode 100644 index 0000000000..d2ebefb85c --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.5.0.20180608182403__schema-add-analysis-gen-progress.sql @@ -0,0 +1 @@ +ALTER TABLE ${ohdsiSchema}.cohort_analysis_gen_info ADD COLUMN progress INT DEFAULT 0; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.5.0.20180713123456__cem_v_1.sql b/src/main/resources/db/migration/postgresql/V2.5.0.20180713123456__cem_v_1.sql new file mode 100644 index 0000000000..9d6ad67ac2 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.5.0.20180713123456__cem_v_1.sql @@ -0,0 +1,12 @@ +DELETE FROM ${ohdsiSchema}.CONCEPT_SET_GENERATION_INFO; + +DROP TABLE ${ohdsiSchema}.CONCEPT_SET_NEGATIVE_CONTROLS; +DROP SEQUENCE ${ohdsiSchema}.negative_controls_sequence; +CREATE SEQUENCE ${ohdsiSchema}.negative_controls_sequence; +CREATE TABLE ${ohdsiSchema}.CONCEPT_SET_NEGATIVE_CONTROLS ( + id INTEGER NOT NULL DEFAULT NEXTVAL('negative_controls_sequence'), + evidence_job_id BIGINT NOT NULL, + source_id INTEGER NOT NULL, + 
concept_set_id INTEGER NOT NULL, + CONSTRAINT PK_CONCEPT_SET_NC PRIMARY KEY (id) +); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.5.0.20180720117120__source-connection-check-rule.sql b/src/main/resources/db/migration/postgresql/V2.5.0.20180720117120__source-connection-check-rule.sql new file mode 100644 index 0000000000..0477ba79c4 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.5.0.20180720117120__source-connection-check-rule.sql @@ -0,0 +1,5 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES(nextval('${ohdsiSchema}.sec_permission_id_seq'), 'source:connection:*:get', 'Check source connection'); +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr + WHERE sp."value" = 'source:connection:*:get' AND sr.name IN ('admin'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.5.0.20180725172844__add-ad-import-permissions.sql b/src/main/resources/db/migration/postgresql/V2.5.0.20180725172844__add-ad-import-permissions.sql new file mode 100644 index 0000000000..4c5b36fe4b --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.5.0.20180725172844__add-ad-import-permissions.sql @@ -0,0 +1,64 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) + VALUES (nextval('${ohdsiSchema}.sec_permission_id_seq'), + 'user:providers:get', 'Get list of authentication providers AD/LDAP'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr + WHERE sp."value" = 'user:providers:get' AND sr.name IN ('admin'); + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) + VALUES (nextval('${ohdsiSchema}.sec_permission_id_seq'), + 'user:import:*:groups:get', 'Search groups in AD/LDAP'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr + WHERE sp."value" = 'user:import:*:groups:get' AND sr.name IN ('admin'); + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES (nextval('${ohdsiSchema}.sec_permission_id_seq'), + 'user:import:*:post', 'Search users in AD/LDAP'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr + WHERE sp."value" = 'user:import:*:post' AND sr.name IN ('admin'); + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES (nextval('${ohdsiSchema}.sec_permission_id_seq'), + 'user:import:post', 'Import users from AD/LDAP'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr + WHERE sp."value" = 'user:import:post' AND sr.name IN ('admin'); + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES (nextval('${ohdsiSchema}.sec_permission_id_seq'), + 'user:import:*:mapping:post', 'Save Atlas roles mappings to LDAP groups'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr + WHERE sp."value" = 'user:import:*:mapping:post' AND sr.name IN ('admin'); + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES 
(nextval('${ohdsiSchema}.sec_permission_id_seq'), + 'user:import:*:mapping:get', 'Read Atlas roles mappings to LDAP groups'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr + WHERE sp."value" = 'user:import:*:mapping:get' AND sr.name IN ('admin'); + +CREATE SEQUENCE ${ohdsiSchema}.sec_role_group_seq; + +CREATE TABLE ${ohdsiSchema}.sec_role_group( + id INTEGER PRIMARY KEY DEFAULT nextval('${ohdsiSchema}.sec_role_group_seq'), + provider VARCHAR NOT NULL, + group_dn VARCHAR NOT NULL, + group_name VARCHAR, + role_id INTEGER NOT NULL, + UNIQUE(provider, group_dn, role_id) +); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.5.0.20180730192730__schema-add-kerberos-to-source.sql b/src/main/resources/db/migration/postgresql/V2.5.0.20180730192730__schema-add-kerberos-to-source.sql new file mode 100644 index 0000000000..2cbbe86c4a --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.5.0.20180730192730__schema-add-kerberos-to-source.sql @@ -0,0 +1,4 @@ +ALTER TABLE ${ohdsiSchema}.source ADD COLUMN krb_auth_method VARCHAR DEFAULT 'PASSWORD' NOT NULL; +ALTER TABLE ${ohdsiSchema}.source ADD COLUMN keytab_name VARCHAR; +ALTER TABLE ${ohdsiSchema}.source ADD COLUMN krb_keytab BYTEA; +ALTER TABLE ${ohdsiSchema}.source ADD COLUMN krb_admin_server VARCHAR; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.5.0.20180817154116__add-extra-import-permissions.sql b/src/main/resources/db/migration/postgresql/V2.5.0.20180817154116__add-extra-import-permissions.sql new file mode 100644 index 0000000000..25b896fab0 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.5.0.20180817154116__add-extra-import-permissions.sql @@ -0,0 +1,8 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES (nextval('${ohdsiSchema}.sec_permission_id_seq'), + 'user:import:*:test:get', 'Check LDAP/AD connection'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr + WHERE sp."value" = 'user:import:*:test:get' AND sr.name IN ('admin'); diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20180731092421__cohort-characterization.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20180731092421__cohort-characterization.sql new file mode 100644 index 0000000000..36f9ff61ec --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20180731092421__cohort-characterization.sql @@ -0,0 +1,276 @@ +CREATE SEQUENCE ${ohdsiSchema}.cohort_characterization_seq; +CREATE TABLE IF NOT EXISTS ${ohdsiSchema}.cohort_characterization +( + id BIGINT PRIMARY KEY DEFAULT NEXTVAL('cohort_characterization_seq'), + name VARCHAR(255) NOT NULL, + created_by_id INTEGER, + created_date TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT (now()), + modified_by_id INTEGER, + modified_date TIMESTAMP WITH TIME ZONE, + hash_code INTEGER NULL +); + +ALTER TABLE ${ohdsiSchema}.cohort_characterization + ADD CONSTRAINT fk_cc_ser_user_creator FOREIGN KEY (created_by_id) +REFERENCES ${ohdsiSchema}.sec_user (id) +ON UPDATE NO ACTION ON DELETE NO ACTION; + +ALTER TABLE ${ohdsiSchema}.cohort_characterization + ADD CONSTRAINT fk_cc_ser_user_updater FOREIGN KEY (modified_by_id) +REFERENCES ${ohdsiSchema}.sec_user (id) +ON UPDATE NO ACTION ON DELETE NO ACTION; + + + +CREATE SEQUENCE ${ohdsiSchema}.cc_param_sequence; +CREATE 
TABLE IF NOT EXISTS ${ohdsiSchema}.cc_param +( + id BIGINT PRIMARY KEY DEFAULT NEXTVAL('cc_param_sequence'), + cohort_characterization_id BIGINT NOT NULL, + name VARCHAR(255), + value VARCHAR(255) +); + +ALTER TABLE ${ohdsiSchema}.cc_param + ADD CONSTRAINT fk_ccp_cc FOREIGN KEY (cohort_characterization_id) +REFERENCES ${ohdsiSchema}.cohort_characterization (id) +ON UPDATE NO ACTION ON DELETE CASCADE; + + + +CREATE SEQUENCE ${ohdsiSchema}.fe_analysis_sequence; +CREATE TABLE IF NOT EXISTS ${ohdsiSchema}.fe_analysis +( + id BIGINT PRIMARY KEY DEFAULT NEXTVAL('fe_analysis_sequence'), + type VARCHAR(255), + name VARCHAR(255), + domain VARCHAR(255), + descr VARCHAR(1000), + value VARCHAR(255), + design Text, + is_locked BOOLEAN, + stat_type VARCHAR(255) +); + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:post', 'Create cohort characterization'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:import:post', 'Import cohort characterization'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:get', 'Get cohort characterizations list'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:get', 'Get cohort characterization'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:generation:get', 'Get cohort characterization generations'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:generation:*:get', 'Get cohort characterization generation'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:generation:*:delete', 'Delete cohort characterization generation and results'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:generation:*:result:get', 'Get cohort characterization generation results'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:generation:*:design:get', 'Get cohort characterization generation design'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:export:get', 'Export cohort characterization'), + + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'feature-analysis:get', 'Get feature analyses list'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'feature-analysis:*:get', 'Get feature analysis'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'feature-analysis:post', 'Create feature analysis'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp."value" IN ( + 'cohort-characterization:post', + 'cohort-characterization:import:post', + 'cohort-characterization:get', + 'cohort-characterization:*:get', + 'cohort-characterization:*:generation:get', + 'cohort-characterization:generation:*:get', + 'cohort-characterization:generation:*:delete', + 'cohort-characterization:generation:*:result:get', + 'cohort-characterization:generation:*:design:get', + 'cohort-characterization:*:export:get', + + 'feature-analysis:get', + 'feature-analysis:*:get', + 'feature-analysis:post' +) +AND sr.name IN ('Atlas users'); + + +-- SOURCE based permissions + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) + SELECT nextval('${ohdsiSchema}.sec_permission_id_seq') AS id, + 'source:' || source_key || ':access' AS value, + 'Access to Source with SourceKey = ' || source_key AS description + FROM 
${ohdsiSchema}.source; + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.source + join ${ohdsiSchema}.sec_permission sp ON sp.value IN ('source:' || source_key || ':access') + join ${ohdsiSchema}.sec_role sr ON sr.name = 'Source user (' || source_key || ')'; + +CREATE TABLE IF NOT EXISTS ${ohdsiSchema}.cc_analysis +( + cohort_characterization_id BIGINT NOT NULL, + fe_analysis_id BIGINT NOT NULL +); + +ALTER TABLE ${ohdsiSchema}.cc_analysis + ADD CONSTRAINT fk_c_char_a_fe_analysis FOREIGN KEY (fe_analysis_id) +REFERENCES ${ohdsiSchema}.fe_analysis(id) +ON UPDATE NO ACTION ON DELETE CASCADE; + +ALTER TABLE ${ohdsiSchema}.cc_analysis + ADD CONSTRAINT fk_c_char_a_cc FOREIGN KEY (cohort_characterization_id) +REFERENCES ${ohdsiSchema}.cohort_characterization(id) +ON UPDATE NO ACTION ON DELETE CASCADE; + + + +CREATE SEQUENCE ${ohdsiSchema}.fe_analysis_criteria_sequence; +CREATE TABLE IF NOT EXISTS ${ohdsiSchema}.fe_analysis_criteria +( + id BIGINT PRIMARY KEY DEFAULT NEXTVAL('fe_analysis_criteria_sequence'), + name VARCHAR(255), + expression Text, + fe_analysis_id BIGINT +); + +ALTER TABLE ${ohdsiSchema}.fe_analysis_criteria + ADD CONSTRAINT fk_fec_fe_analysis FOREIGN KEY (fe_analysis_id) +REFERENCES ${ohdsiSchema}.fe_analysis(id) +ON UPDATE NO ACTION ON DELETE CASCADE; + + + +CREATE TABLE IF NOT EXISTS ${ohdsiSchema}.cc_cohort +( + cohort_characterization_id BIGINT NOT NULL, + cohort_id INT NOT NULL +); + +ALTER TABLE ${ohdsiSchema}.cc_cohort + ADD CONSTRAINT fk_c_char_c_fe_analysis FOREIGN KEY (cohort_id) +REFERENCES ${ohdsiSchema}.cohort_definition(id) +ON UPDATE NO ACTION ON DELETE CASCADE; + +ALTER TABLE ${ohdsiSchema}.cc_cohort + ADD CONSTRAINT fk_c_char_c_cc FOREIGN KEY (cohort_characterization_id) +REFERENCES ${ohdsiSchema}.cohort_characterization(id) +ON UPDATE NO ACTION ON DELETE CASCADE; + + +ALTER TABLE ${ohdsiSchema}.cohort_definition_details ADD hash_code int null; + +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Measurement Range Group Short Term', NULL, 'Covariates indicating whether measurements are below, within, or above normal range in the short term window.', null, 'MeasurementRangeGroupShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Group Era Start Long Term', 'CONDITION', 'One covariate per condition era rolled up to groups in the condition_era table starting in the long term window.', null, 'ConditionGroupEraStartLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Group Era Start Medium Term', 'DRUG', 'One covariate per drug rolled up to ATC groups in the drug_era table starting in the medium term window.', null, 'DrugGroupEraStartMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Era Short Term', 'CONDITION', 'One covariate per condition in the condition_era table overlapping with any part of the short term window.', null, 'ConditionEraShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Group Era Long Term', 'DRUG', 'One covariate per drug rolled up to ATC groups in the drug_era 
table overlapping with any part of the long term window.', null, 'DrugGroupEraLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Group Era Overlapping', 'CONDITION', 'One covariate per condition era rolled up to groups in the condition_era table overlapping with the end of the risk window.', null, 'ConditionGroupEraOverlapping', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Group Era Short Term', 'DRUG', 'One covariate per drug rolled up to ATC groups in the drug_era table overlapping with any part of the short term window.', null, 'DrugGroupEraShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Group Era Medium Term', 'DRUG', 'One covariate per drug rolled up to ATC groups in the drug_era table overlapping with any part of the medium term window.', null, 'DrugGroupEraMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Era Start Long Term', 'CONDITION', 'One covariate per condition in the condition_era table starting in the long term window.', null, 'ConditionEraStartLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Era Any Time Prior', 'CONDITION', 'One covariate per condition in the condition_era table overlapping with any time prior to index.', null, 'ConditionEraAnyTimePrior', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Group Era Start Medium Term', 'CONDITION', 'One covariate per condition era rolled up to groups in the condition_era table starting in the medium term window.', null, 'ConditionGroupEraStartMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Exposure Long Term', 'DRUG', 'One covariate per drug in the drug_exposure table starting in the long term window.', null, 'DrugExposureLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Measurement Range Group Long Term', NULL, 'Covariates indicating whether measurements are below, within, or above normal range in the long term window.', null, 'MeasurementRangeGroupLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Measurement Range Group Medium Term', NULL, 'Covariates indicating whether measurements are below, within, or above normal range in the medium term window.', null, 'MeasurementRangeGroupMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Group Era Any Time Prior', 'DRUG', 'One covariate per drug rolled up to ATC groups in the drug_era table overlapping with any time prior to index.', null, 'DrugGroupEraAnyTimePrior', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, 
stat_type) VALUES ('PRESET', 'Condition Era Medium Term', 'CONDITION', 'One covariate per condition in the condition_era table overlapping with any part of the medium term window.', null, 'ConditionEraMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Era Overlapping', 'CONDITION', 'One covariate per condition in the condition_era table overlapping with the end of the risk window.', null, 'ConditionEraOverlapping', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Era Start Short Term', 'CONDITION', 'One covariate per condition in the condition_era table starting in the short term window.', null, 'ConditionEraStartShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Group Era Start Short Term', 'DRUG', 'One covariate per drug rolled up to ATC groups in the drug_era table starting in the short term window.', null, 'DrugGroupEraStartShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Group Era Short Term', 'CONDITION', 'One covariate per condition era rolled up to groups in the condition_era table overlapping with any part of the short term window.', null, 'ConditionGroupEraShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Era Start Medium Term', 'CONDITION', 'One covariate per condition in the condition_era table starting in the medium term window.', null, 'ConditionEraStartMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Procedure Occurrence Medium Term', 'PROCEDURE', 'One covariate per procedure in the procedure_occurrence table in the medium term window.', null, 'ProcedureOccurrenceMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Era Long Term', 'CONDITION', 'One covariate per condition in the condition_era table overlapping with any part of the long term window.', null, 'ConditionEraLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Group Era Start Long Term', 'DRUG', 'One covariate per drug rolled up to ATC groups in the drug_era table starting in the long term window.', null, 'DrugGroupEraStartLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Group Era Overlapping', 'DRUG', 'One covariate per drug rolled up to ATC groups in the drug_era table overlapping with the end of the risk window.', null, 'DrugGroupEraOverlapping', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Measurement Range Group Any Time Prior', NULL, 'Covariates indicating whether measurements are below, within, or above normal range any time prior to index.', null, 'MeasurementRangeGroupAnyTimePrior', true, 'PREVALENCE'); +INSERT 
INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Group Era Any Time Prior', 'CONDITION', 'One covariate per condition era rolled up to groups in the condition_era table overlapping with any time prior to index.', null, 'ConditionGroupEraAnyTimePrior', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Exposure Any Time Prior', 'DRUG', 'One covariate per drug in the drug_exposure table starting any time prior to index.', null, 'DrugExposureAnyTimePrior', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Group Era Start Short Term', 'CONDITION', 'One covariate per condition era rolled up to groups in the condition_era table starting in the short term window.', null, 'ConditionGroupEraStartShortTerm', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Group Era Long Term', 'CONDITION', 'One covariate per condition era rolled up to groups in the condition_era table overlapping with any part of the long term window.', null, 'ConditionGroupEraLongTerm', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Exposure Short Term', 'DRUG', 'One covariate per drug in the drug_exposure table starting in the short term window.', null, 'DrugExposureShortTerm', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Group Era Medium Term', 'CONDITION', 'One covariate per condition era rolled up to groups in the condition_era table overlapping with any part of the medium term window.', null, 'ConditionGroupEraMediumTerm', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Exposure Medium Term', 'DRUG', 'One covariate per drug in the drug_exposure table starting in the medium term window.', null, 'DrugExposureMediumTerm', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Observation Short Term', 'OBSERVATION', 'One covariate per observation in the observation table in the short term window.', null, 'ObservationShortTerm', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Era Start Long Term', 'DRUG', 'One covariate per drug in the drug_era table starting in the long term window.', null, 'DrugEraStartLongTerm', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Dcsi', 'CONDITION', 'The Diabetes Comorbidity Severity Index (DCSI) using all conditions prior to the window end.', null, 'Dcsi', true, 'DISTRIBUTION');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Era Start Short Term', 'DRUG', 'One covariate per drug in the drug_era table starting in the short term window.', null, 'DrugEraStartShortTerm', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, 
domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Distinct Ingredient Count Medium Term', 'DRUG', 'The number of distinct ingredients observed in the medium term window.', null, 'DistinctIngredientCountMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Measurement Any Time Prior', 'MEASUREMENT', 'One covariate per measurement in the measurement table any time prior to index.', null, 'MeasurementAnyTimePrior', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Measurement Medium Term', 'MEASUREMENT', 'One covariate per measurement in the measurement table in the medium term window.', null, 'MeasurementMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Distinct Condition Count Long Term', 'CONDITION', 'The number of distinct condition concepts observed in the long term window.', null, 'DistinctConditionCountLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Measurement Value Long Term', NULL, 'One covariate containing the value per measurement-unit combination in the long term window.', null, 'MeasurementValueLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Era Short Term', 'DRUG', 'One covariate per drug in the drug_era table overlapping with any part of the short window.', null, 'DrugEraShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Era Overlapping', 'DRUG', 'One covariate per drug in the drug_era table overlapping with the end of the risk window.', null, 'DrugEraOverlapping', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Observation Any Time Prior', 'OBSERVATION', 'One covariate per observation in the observation table any time prior to index.', null, 'ObservationAnyTimePrior', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Distinct Ingredient Count Long Term', 'DRUG', 'The number of distinct ingredients observed in the long term window.', null, 'DistinctIngredientCountLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Distinct Procedure Count Short Term', 'PROCEDURE', 'The number of distinct procedures observed in the short term window.', null, 'DistinctProcedureCountShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Distinct Condition Count Short Term', 'CONDITION', 'The number of distinct condition concepts observed in the short term window.', null, 'DistinctConditionCountShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Charlson Index', 'CONDITION', 'The Charlson comorbidity index (Romano adaptation) using all conditions prior to 
the window end.', null, 'CharlsonIndex', true, 'DISTRIBUTION'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Measurement Short Term', 'MEASUREMENT', 'One covariate per measurement in the measurement table in the short term window.', null, 'MeasurementShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Distinct Procedure Count Medium Term', 'PROCEDURE', 'The number of distinct procedures observed in the medium term window.', null, 'DistinctProcedureCountMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Device Exposure Any Time Prior', 'DEVICE', 'One covariate per device in the device exposure table starting any time prior to index.', null, 'DeviceExposureAnyTimePrior', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Observation Long Term', 'OBSERVATION', 'One covariate per observation in the observation table in the long term window.', null, 'ObservationLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Distinct Condition Count Medium Term', 'CONDITION', 'The number of distinct condition concepts observed in the medium term window.', null, 'DistinctConditionCountMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Procedure Occurrence Short Term', 'PROCEDURE', 'One covariate per procedure in the procedure_occurrence table in the short term window.', null, 'ProcedureOccurrenceShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Observation Medium Term', 'OBSERVATION', 'One covariate per observation in the observation table in the medium term window.', null, 'ObservationMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Device Exposure Long Term', 'DEVICE', 'One covariate per device in the device exposure table starting in the long term window.', null, 'DeviceExposureLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Measurement Value Short Term', NULL, 'One covariate containing the value per measurement-unit combination in the short term window.', null, 'MeasurementValueShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Device Exposure Medium Term', 'DEVICE', 'One covariate per device in the device exposure table starting in the medium term window.', null, 'DeviceExposureMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Measurement Long Term', 'MEASUREMENT', 'One covariate per measurement in the measurement table in the long term window.', null, 'MeasurementLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, 
stat_type) VALUES ('PRESET', 'Measurement Value Medium Term', NULL, 'One covariate containing the value per measurement-unit combination in the medium term window.', null, 'MeasurementValueMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Era Start Medium Term', 'DRUG', 'One covariate per drug in the drug_era table starting in the medium term window.', null, 'DrugEraStartMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Measurement Value Any Time Prior', NULL, 'One covariate containing the value per measurement-unit combination any time prior to index.', null, 'MeasurementValueAnyTimePrior', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Distinct Ingredient Count Short Term', 'DRUG', 'The number of distinct ingredients observed in the short term window.', null, 'DistinctIngredientCountShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Device Exposure Short Term', 'DEVICE', 'One covariate per device in the device exposure table starting in the short term window.', null, 'DeviceExposureShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Distinct Procedure Count Long Term', 'PROCEDURE', 'The number of distinct procedures observed in the long term window.', null, 'DistinctProcedureCountLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Occurrence Long Term', 'CONDITION', 'One covariate per condition in the condition_occurrence table starting in the long term window.', null, 'ConditionOccurrenceLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Demographics Index Month', 'DEMOGRAPHICS', 'Month of the index date.', null, 'DemographicsIndexMonth', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Occurrence Any Time Prior', 'CONDITION', 'One covariate per condition in the condition_occurrence table starting any time prior to index.', null, 'ConditionOccurrenceAnyTimePrior', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Demographics Ethnicity', 'DEMOGRAPHICS', 'Ethnicity of the subject.', null, 'DemographicsEthnicity', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Demographics Age Group', 'DEMOGRAPHICS', 'Age of the subject on the index date (in 5 year age groups)', null, 'DemographicsAgeGroup', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Demographics Race', 'DEMOGRAPHICS', 'Race of the subject.', null, 'DemographicsRace', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 
'Demographics Prior Observation Time', 'DEMOGRAPHICS', 'Number of continuous days of observation time preceding the index date.', null, 'DemographicsPriorObservationTime', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Demographics Gender', 'DEMOGRAPHICS', 'Gender of the subject.', null, 'DemographicsGender', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Demographics Index Year Month', 'DEMOGRAPHICS', 'Both calendar year and month of the index date in a single variable.', null, 'DemographicsIndexYearMonth', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Occurrence Medium Term', 'CONDITION', 'One covariate per condition in the condition_occurrence table starting in the medium term window.', null, 'ConditionOccurrenceMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Demographics Age', 'DEMOGRAPHICS', 'Age of the subject on the index date (in years).', null, 'DemographicsAge', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Chads 2', 'CONDITION', 'The CHADS2 score using all conditions prior to the window end.', null, 'Chads2', true, 'DISTRIBUTION'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Demographics Time In Cohort', 'DEMOGRAPHICS', 'Number of days of observation time during cohort period.', null, 'DemographicsTimeInCohort', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Demographics Index Year', 'DEMOGRAPHICS', 'Year of the index date.', null, 'DemographicsIndexYear', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Demographics Post Observation Time', 'DEMOGRAPHICS', 'Number of continuous days of observation time following the index date.', null, 'DemographicsPostObservationTime', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Chads 2 Vasc', 'CONDITION', 'The CHADS2VASc score using all conditions prior to the window end.', null, 'Chads2Vasc', true, 'DISTRIBUTION'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Occurrence Primary Inpatient Long Term', 'CONDITION', 'One covariate per condition observed as a primary diagnosis in an inpatient setting in the condition_occurrence table starting in the long term window.', null, 'ConditionOccurrencePrimaryInpatientLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Procedure Occurrence Long Term', 'PROCEDURE', 'One covariate per procedure in the procedure_occurrence table in the long term window.', null, 'ProcedureOccurrenceLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition 
Occurrence Primary Inpatient Any Time Prior', 'CONDITION', 'One covariate per condition observed as a primary diagnosis in an inpatient setting in the condition_occurrence table starting any time prior to index.', null, 'ConditionOccurrencePrimaryInpatientAnyTimePrior', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Era Long Term', 'DRUG', 'One covariate per drug in the drug_era table overlapping with any part of the long term window.', null, 'DrugEraLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Procedure Occurrence Any Time Prior', 'PROCEDURE', 'One covariate per procedure in the procedure_occurrence table any time prior to index.', null, 'ProcedureOccurrenceAnyTimePrior', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Era Medium Term', 'DRUG', 'One covariate per drug in the drug_era table overlapping with any part of the medium term window.', null, 'DrugEraMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Drug Era Any Time Prior', 'DRUG', 'One covariate per drug in the drug_era table overlapping with any time prior to index.', null, 'DrugEraAnyTimePrior', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Occurrence Short Term', 'CONDITION', 'One covariate per condition in the condition_occurrence table starting in the short term window.', null, 'ConditionOccurrenceShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Distinct Measurement Count Short Term', 'MEASUREMENT', 'The number of distinct measurements observed in the short term window.', null, 'DistinctMeasurementCountShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Visit Concept Count Short Term', 'VISIT', 'The number of visits observed in the short term window, stratified by visit concept ID.', null, 'VisitConceptCountShortTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Distinct Observation Count Medium Term', 'OBSERVATION', 'The number of distinct observations observed in the medium term window.', null, 'DistinctObservationCountMediumTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Distinct Observation Count Long Term', 'OBSERVATION', 'The number of distinct observations observed in the long term window.', null, 'DistinctObservationCountLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Distinct Measurement Count Long Term', 'MEASUREMENT', 'The number of distinct measurements observed in the long term window.', null, 'DistinctMeasurementCountLongTerm', true, 'PREVALENCE'); +INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 
'Distinct Measurement Count Medium Term', 'MEASUREMENT', 'The number of distinct measurements observed in the medium term window.', null, 'DistinctMeasurementCountMediumTerm', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Distinct Observation Count Short Term', 'OBSERVATION', 'The number of distinct observations observed in the short term window.', null, 'DistinctObservationCountShortTerm', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Visit Concept Count Long Term', 'VISIT', 'The number of visits observed in the long term window, stratified by visit concept ID.', null, 'VisitConceptCountLongTerm', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Visit Concept Count Medium Term', 'VISIT', 'The number of visits observed in the medium term window, stratified by visit concept ID.', null, 'VisitConceptCountMediumTerm', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Visit Count Medium Term', 'VISIT', 'The number of visits observed in the medium term window.', null, 'VisitCountMediumTerm', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Visit Count Short Term', 'VISIT', 'The number of visits observed in the short term window.', null, 'VisitCountShortTerm', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Occurrence Primary Inpatient Short Term', 'CONDITION', 'One covariate per condition observed as a primary diagnosis in an inpatient setting in the condition_occurrence table starting in the short term window.', null, 'ConditionOccurrencePrimaryInpatientShortTerm', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Visit Count Long Term', 'VISIT', 'The number of visits observed in the long term window.', null, 'VisitCountLongTerm', true, 'PREVALENCE');
+INSERT INTO ${ohdsiSchema}.fe_analysis (type, name, domain, descr, value, design, is_locked, stat_type) VALUES ('PRESET', 'Condition Occurrence Primary Inpatient Medium Term', 'CONDITION', 'One covariate per condition observed as a primary diagnosis in an inpatient setting in the condition_occurrence table starting in the medium term window.', null, 'ConditionOccurrencePrimaryInpatientMediumTerm', true, 'PREVALENCE');
+
+CREATE TABLE ${ohdsiSchema}.analysis_generation_info (
+  job_execution_id INTEGER NOT NULL,
+  design VARCHAR NOT NULL,
+  hash_code VARCHAR NOT NULL,
+  created_by_id INTEGER
+);
+
+ALTER TABLE ${ohdsiSchema}.analysis_generation_info
+  ADD CONSTRAINT fk_cgi_sec_user FOREIGN KEY (created_by_id)
+REFERENCES ${ohdsiSchema}.sec_user(id)
+ON UPDATE NO ACTION ON DELETE NO ACTION;
\ No newline at end of file
diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20180731092422__cohort-characterization-generations-view.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20180731092422__cohort-characterization-generations-view.sql
new file mode 100644
index 0000000000..aafa67e56e
--- /dev/null
+++ 
b/src/main/resources/db/migration/postgresql/V2.6.0.20180731092422__cohort-characterization-generations-view.sql @@ -0,0 +1,27 @@ +CREATE OR REPLACE VIEW ${ohdsiSchema}.cc_generation as ( + +SELECT + -- Spring batch based + job.job_execution_id id, + job.create_time start_time, + job.end_time end_time, + job.status status, + job.exit_message exit_message, + CAST(cc_id_param.string_val AS INTEGER) cc_id, + CAST(source_param.string_val AS INTEGER) source_id, + -- Generation info based + gen_info.design design, + gen_info.hash_code hash_code, + gen_info.created_by_id created_by_id +FROM ${ohdsiSchema}.batch_job_execution job + JOIN ${ohdsiSchema}.batch_job_execution_params cc_id_param + ON job.job_execution_id = cc_id_param.job_execution_id AND cc_id_param.key_name = 'cohort_characterization_id' + JOIN ${ohdsiSchema}.batch_job_execution_params source_param + ON job.job_execution_id = source_param.job_execution_id AND source_param.key_name = 'source_id' + LEFT JOIN ${ohdsiSchema}.analysis_generation_info gen_info + ON job.job_execution_id = gen_info.job_execution_id +ORDER BY start_time DESC + +); + +-- TODO indexes \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20180906220021__pathway_analysis.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20180906220021__pathway_analysis.sql new file mode 100644 index 0000000000..2c1a7e7734 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20180906220021__pathway_analysis.sql @@ -0,0 +1,98 @@ +CREATE SEQUENCE ${ohdsiSchema}.pathway_analysis_sequence; +CREATE TABLE ${ohdsiSchema}.pathway_analysis +( + id INTEGER DEFAULT NEXTVAL('pathway_analysis_sequence'), + name VARCHAR NOT NULL, + combination_window INTEGER, + min_cell_count INTEGER, + max_depth INTEGER, + allow_repeats BOOLEAN DEFAULT FALSE, + created_by_id INTEGER, + created_date TIMESTAMP, + modified_by_id INTEGER, + modified_date TIMESTAMP, + hash_code INTEGER, + CONSTRAINT PK_pathway_analysis PRIMARY KEY (id) +); + +CREATE SEQUENCE ${ohdsiSchema}.pathway_cohort_sequence; + +CREATE TABLE ${ohdsiSchema}.pathway_target_cohort +( + id INTEGER DEFAULT NEXTVAL('pathway_cohort_sequence'), + name VARCHAR(255) NOT NULL, + cohort_definition_id INTEGER NOT NULL, + pathway_analysis_id INTEGER NOT NULL, + CONSTRAINT PK_pathway_target_cohort PRIMARY KEY (id), + CONSTRAINT FK_ptc_cd_id + FOREIGN KEY (cohort_definition_id) + REFERENCES ${ohdsiSchema}.cohort_definition (id), + CONSTRAINT FK_ptc_pa_id + FOREIGN KEY (pathway_analysis_id) + REFERENCES ${ohdsiSchema}.pathway_analysis (id) +); + +CREATE TABLE ${ohdsiSchema}.pathway_event_cohort +( + id INTEGER DEFAULT NEXTVAL('pathway_cohort_sequence'), + name VARCHAR NOT NULL, + cohort_definition_id INTEGER NOT NULL, + pathway_analysis_id INTEGER NOT NULL, + CONSTRAINT PK_pathway_event_cohort PRIMARY KEY (id), + CONSTRAINT FK_pec_cd_id + FOREIGN KEY (cohort_definition_id) + REFERENCES ${ohdsiSchema}.cohort_definition (id), + CONSTRAINT FK_pec_pa_id + FOREIGN KEY (pathway_analysis_id) + REFERENCES ${ohdsiSchema}.pathway_analysis (id) +); + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:post', 'Create Pathways Analysis'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:import:post', 'Import Pathways Analysis'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:get', 'Get Pathways Analyses list'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 
'pathway-analysis:*:get', 'Get Pathways Analysis instance'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:*:generation:get', 'Get Pathways Analysis generations list'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:generation:*:get', 'Get Pathways Analysis generation instance'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:generation:*:result:get', 'Get Pathways Analysis generation results'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:generation:*:design:get', 'Get Pathways Analysis generation design'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:*:export:get', 'Export Pathways Analysis'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp."value" IN ( + 'pathway-analysis:post', + 'pathway-analysis:import:post', + 'pathway-analysis:get', + 'pathway-analysis:*:get', + 'pathway-analysis:*:generation:get', + 'pathway-analysis:generation:*:get', + 'pathway-analysis:generation:*:result:get', + 'pathway-analysis:generation:*:design:get', + 'pathway-analysis:*:export:get' +) +AND sr.name IN ('Atlas users'); + +CREATE OR REPLACE VIEW ${ohdsiSchema}.pathway_analysis_generation as + (SELECT + job.job_execution_id id, + job.create_time start_time, + job.end_time end_time, + job.status status, + job.exit_message exit_message, + CAST(pa_id_param.string_val AS INTEGER) pathway_analysis_id, + CAST(source_param.string_val AS INTEGER) source_id, + -- Generation info based + gen_info.design design, + gen_info.hash_code hash_code, + gen_info.created_by_id created_by_id +FROM ${ohdsiSchema}.batch_job_execution job + JOIN ${ohdsiSchema}.batch_job_execution_params pa_id_param + ON job.job_execution_id = pa_id_param.job_execution_id AND pa_id_param.key_name = 'pathway_analysis_id' + JOIN ${ohdsiSchema}.batch_job_execution_params source_param + ON job.job_execution_id = source_param.job_execution_id AND source_param.key_name = 'source_id' + LEFT JOIN ${ohdsiSchema}.analysis_generation_info gen_info + ON job.job_execution_id = gen_info.job_execution_id +ORDER BY start_time DESC); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20180910113305__migrate_common_entities_to_user_rel.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20180910113305__migrate_common_entities_to_user_rel.sql new file mode 100644 index 0000000000..57ea600d1a --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20180910113305__migrate_common_entities_to_user_rel.sql @@ -0,0 +1,77 @@ +-- Cohort Definition + +ALTER TABLE ${ohdsiSchema}.cohort_definition ADD COLUMN created_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id), + ADD COLUMN modified_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id); + +UPDATE ${ohdsiSchema}.cohort_definition d SET created_by_id = u.id + FROM ${ohdsiSchema}.sec_user u WHERE u.login = d.created_by AND d.created_by IS NOT NULL; + +UPDATE ${ohdsiSchema}.cohort_definition d SET modified_by_id = u.id + FROM ${ohdsiSchema}.sec_user u WHERE u.login = d.modified_by AND d.modified_by IS NOT NULL; + +ALTER TABLE ${ohdsiSchema}.cohort_definition DROP COLUMN created_by, DROP COLUMN modified_by; + +-- Feasibility Study + +ALTER TABLE ${ohdsiSchema}.feasibility_study ADD COLUMN created_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id), + ADD COLUMN modified_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id); + 
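+-- The same login-to-id mapping pattern repeats for each entity in this migration: the legacy
+-- created_by / modified_by login strings are matched against sec_user.login, the matching ids
+-- are written to the new *_by_id columns, and the old text columns are dropped. Rows whose
+-- login has no sec_user match are simply left with a NULL id.
+-- Illustrative check only (not executed by this migration): orphaned logins per table can be
+-- listed with a query such as
+--   SELECT f.created_by FROM ${ohdsiSchema}.feasibility_study f
+--   WHERE f.created_by IS NOT NULL
+--     AND NOT EXISTS (SELECT 1 FROM ${ohdsiSchema}.sec_user u WHERE u.login = f.created_by);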
+UPDATE ${ohdsiSchema}.feasibility_study f SET created_by_id = u.id + FROM ${ohdsiSchema}.sec_user u WHERE u.login = f.created_by AND f.created_by IS NOT NULL; + +UPDATE ${ohdsiSchema}.feasibility_study f SET modified_by_id = u.id + FROM ${ohdsiSchema}.sec_user u WHERE u.login = f.modified_by AND f.modified_by IS NOT NULL; + +ALTER TABLE ${ohdsiSchema}.feasibility_study DROP COLUMN created_by, DROP COLUMN modified_by; + +-- Incidence Rate Analysis + +ALTER TABLE ${ohdsiSchema}.ir_analysis ADD COLUMN created_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id), + ADD COLUMN modified_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id); + +UPDATE ${ohdsiSchema}.ir_analysis i SET created_by_id = u.id + FROM ${ohdsiSchema}.sec_user u WHERE u.login = i.created_by AND i.created_by IS NOT NULL; + +UPDATE ${ohdsiSchema}.ir_analysis i SET modified_by_id = u.id + FROM ${ohdsiSchema}.sec_user u WHERE u.login = i.modified_by AND i.modified_by IS NOT NULL; + +ALTER TABLE ${ohdsiSchema}.ir_analysis DROP COLUMN created_by, DROP COLUMN modified_by; + +-- CCA + +ALTER TABLE ${ohdsiSchema}.cca ADD COLUMN created_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id), + ADD COLUMN modified_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id); + +UPDATE ${ohdsiSchema}.cca c SET created_by_id = u.id + FROM ${ohdsiSchema}.sec_user u WHERE u.login = c.created_by AND c.created_by IS NOT NULL; + +UPDATE ${ohdsiSchema}.cca c SET modified_by_id = u.id + FROM ${ohdsiSchema}.sec_user u WHERE u.login = c.modified_by AND c.modified_by IS NOT NULL; + +ALTER TABLE ${ohdsiSchema}.cca DROP COLUMN created_by, DROP COLUMN modified_by; + +-- ConceptSet + +ALTER TABLE ${ohdsiSchema}.concept_set ADD COLUMN created_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id), + ADD COLUMN modified_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id); + +UPDATE ${ohdsiSchema}.concept_set c SET created_by_id = u.id + FROM ${ohdsiSchema}.sec_user u WHERE u.login = c.created_by AND c.created_by IS NOT NULL; + +UPDATE ${ohdsiSchema}.concept_set c SET modified_by_id = u.id + FROM ${ohdsiSchema}.sec_user u WHERE u.login = c.modified_by AND c.modified_by IS NOT NULL; + +ALTER TABLE ${ohdsiSchema}.concept_set DROP COLUMN created_by, DROP COLUMN modified_by; + +-- Patient Level Prediction + +ALTER TABLE ${ohdsiSchema}.plp ADD COLUMN created_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id), + ADD COLUMN modified_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id); + +UPDATE ${ohdsiSchema}.plp p SET created_by_id = u.id + FROM ${ohdsiSchema}.sec_user u WHERE u.login = p.created_by AND p.created_by IS NOT NULL; + +UPDATE ${ohdsiSchema}.plp p SET modified_by_id = u.id + FROM ${ohdsiSchema}.sec_user u WHERE u.login = p.modified_by AND p.modified_by IS NOT NULL; + +ALTER TABLE ${ohdsiSchema}.plp DROP COLUMN created_by, DROP COLUMN modified_by; diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20180921202400__fe-analysis-id.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20180921202400__fe-analysis-id.sql new file mode 100644 index 0000000000..8baa5e1476 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20180921202400__fe-analysis-id.sql @@ -0,0 +1 @@ +ALTER TABLE ${ohdsiSchema}.fe_analysis ALTER id TYPE INTEGER; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20181001200021__estimation_prediction.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20181001200021__estimation_prediction.sql new file mode 100644 index 0000000000..2d87b8e011 --- /dev/null +++ 
b/src/main/resources/db/migration/postgresql/V2.6.0.20181001200021__estimation_prediction.sql @@ -0,0 +1,50 @@ +-- Estimation +CREATE SEQUENCE ${ohdsiSchema}.estimation_seq START WITH 1; +CREATE TABLE ${ohdsiSchema}.estimation +( + estimation_id INTEGER NOT NULL DEFAULT NEXTVAL('estimation_seq'), + name character varying(255) NOT NULL, + type character varying(255) NOT NULL, + description character varying(1000), + specification text NOT NULL, + created_by_id INTEGER, + created_date TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT (now()), + modified_by_id INTEGER, + modified_date TIMESTAMP WITH TIME ZONE, + CONSTRAINT pk_estimation PRIMARY KEY (estimation_id) +); + +ALTER TABLE ${ohdsiSchema}.estimation + ADD CONSTRAINT fk_estimation_ser_user_creator FOREIGN KEY (created_by_id) +REFERENCES ${ohdsiSchema}.sec_user (id) +ON UPDATE NO ACTION ON DELETE NO ACTION; + +ALTER TABLE ${ohdsiSchema}.estimation + ADD CONSTRAINT fk_estimation_ser_user_updater FOREIGN KEY (modified_by_id) +REFERENCES ${ohdsiSchema}.sec_user (id) +ON UPDATE NO ACTION ON DELETE NO ACTION; + +-- Prediction +CREATE SEQUENCE ${ohdsiSchema}.prediction_seq START WITH 1; +CREATE TABLE ${ohdsiSchema}.prediction +( + prediction_id INTEGER NOT NULL DEFAULT NEXTVAL('prediction_seq'), + name character varying(255) NOT NULL, + description character varying(1000), + specification text NOT NULL, + created_by_id INTEGER, + created_date TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT (now()), + modified_by_id INTEGER, + modified_date TIMESTAMP WITH TIME ZONE, + CONSTRAINT pk_prediction PRIMARY KEY (prediction_id) +); + +ALTER TABLE ${ohdsiSchema}.prediction + ADD CONSTRAINT fk_prediction_ser_user_creator FOREIGN KEY (created_by_id) +REFERENCES ${ohdsiSchema}.sec_user (id) +ON UPDATE NO ACTION ON DELETE NO ACTION; + +ALTER TABLE ${ohdsiSchema}.prediction + ADD CONSTRAINT fk_prediction_ser_user_updater FOREIGN KEY (modified_by_id) +REFERENCES ${ohdsiSchema}.sec_user (id) +ON UPDATE NO ACTION ON DELETE NO ACTION; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20181002110845__fe_analysis_conceptsets.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20181002110845__fe_analysis_conceptsets.sql new file mode 100644 index 0000000000..70294cb65f --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20181002110845__fe_analysis_conceptsets.sql @@ -0,0 +1 @@ +ALTER TABLE ${ohdsiSchema}.fe_analysis_criteria ADD COLUMN conceptsets VARCHAR; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20181005122300__schema-create-fe-conceptset.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20181005122300__schema-create-fe-conceptset.sql new file mode 100644 index 0000000000..2077d845ee --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20181005122300__schema-create-fe-conceptset.sql @@ -0,0 +1,16 @@ +CREATE SEQUENCE ${ohdsiSchema}.fe_conceptset_sequence; + +CREATE TABLE ${ohdsiSchema}.fe_analysis_conceptset ( + id bigint NOT NULL DEFAULT NEXTVAL('${ohdsiSchema}.fe_conceptset_sequence'), + fe_analysis_id int NOT NULL, + expression varchar +); + +ALTER TABLE ${ohdsiSchema}.fe_analysis_conceptset + ADD CONSTRAINT pk_fe_conceptset_id PRIMARY KEY (id); + +ALTER TABLE ${ohdsiSchema}.fe_analysis_conceptset + ADD CONSTRAINT fk_fe_conceptset_fe_analysis FOREIGN KEY (fe_analysis_id) + REFERENCES ${ohdsiSchema}.fe_analysis(id) ON UPDATE NO ACTION ON DELETE CASCADE; + +ALTER TABLE ${ohdsiSchema}.fe_analysis_criteria DROP COLUMN conceptsets; \ No newline 
at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20181008210200__source-deleted-at-field.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20181008210200__source-deleted-at-field.sql new file mode 100644 index 0000000000..0ce77092ee --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20181008210200__source-deleted-at-field.sql @@ -0,0 +1 @@ +ALTER TABLE ${ohdsiSchema}.source ADD COLUMN deleted_date TIMESTAMP; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20181009110500__fix-fe-analysis-types.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20181009110500__fix-fe-analysis-types.sql new file mode 100644 index 0000000000..158c738d35 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20181009110500__fix-fe-analysis-types.sql @@ -0,0 +1,45 @@ +UPDATE ${ohdsiSchema}.fe_analysis +SET stat_type = 'DISTRIBUTION' +WHERE type = 'PRESET' and design IN ( + -- DemographicsAge.sql + 'DemographicsAge', + -- DemographicsTime.sql + 'DemographicsPriorObservationTime', + 'DemographicsPostObservationTime', + 'DemographicsTimeInCohort', + -- Chads2.sql + 'Chads2', + -- Chads2Vasc.sql + 'Chads2Vasc', + -- ConceptCounts.sql + 'DistinctConditionCountLongTerm', + 'DistinctConditionCountMediumTerm', + 'DistinctConditionCountShortTerm', + 'DistinctIngredientCountLongTerm', + 'DistinctIngredientCountMediumTerm', + 'DistinctIngredientCountShortTerm', + 'DistinctProcedureCountLongTerm', + 'DistinctProcedureCountMediumTerm', + 'DistinctProcedureCountShortTerm', + 'DistinctMeasurementCountLongTerm', + 'DistinctMeasurementCountMediumTerm', + 'DistinctMeasurementCountShortTerm', + 'DistinctObservationCountLongTerm', + 'DistinctObservationCountMediumTerm', + 'DistinctObservationCountShortTerm', + 'VisitCountLongTerm', + 'VisitCountMediumTerm', + 'VisitCountShortTerm', + 'VisitConceptCountLongTerm', + 'VisitConceptCountMediumTerm', + 'VisitConceptCountShortTerm', + -- MeasurementValue.sql + 'MeasurementValueAnyTimePrior', + 'MeasurementValueLongTerm', + 'MeasurementValueMediumTerm', + 'MeasurementValueShortTerm', + -- CharlsonIndex.sql + 'CharlsonIndex', + -- Dcsi.sql + 'Dcsi' +); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20181009115500__fix-ple-plp-permissions.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20181009115500__fix-ple-plp-permissions.sql new file mode 100644 index 0000000000..877c7bc246 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20181009115500__fix-ple-plp-permissions.sql @@ -0,0 +1,47 @@ +-- Estimation + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'estimation:post', 'Create Estimation'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'estimation:get', 'Get Estimation list'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'estimation:*:get', 'Get Estimation instance'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'estimation:*:copy:get', 'Copy Estimation instance'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'estimation:*:download:get', 'Download Estimation package'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'estimation:*:export:get', 'Export Estimation'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp."value" IN ( + 'estimation:post', + 'estimation:get', + 
'estimation:*:get', + 'estimation:*:copy:get', + 'estimation:*:download:get', + 'estimation:*:export:get' +) +AND sr.name IN ('Atlas users'); + +-- Prediction + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'prediction:post', 'Create Prediction'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'prediction:get', 'Get Prediction list'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'prediction:*:get', 'Get Prediction instance'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'prediction:*:copy:get', 'Copy Prediction instance'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'prediction:*:download:get', 'Download Prediction package'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'prediction:*:export:get', 'Export Prediction'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp."value" IN ( + 'prediction:post', + 'prediction:get', + 'prediction:*:get', + 'prediction:*:copy:get', + 'prediction:*:download:get', + 'prediction:*:export:get' +) +AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20181010133216__schema-add-job-is-canceled.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20181010133216__schema-add-job-is-canceled.sql new file mode 100644 index 0000000000..20d2acbaad --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20181010133216__schema-add-job-is-canceled.sql @@ -0,0 +1,7 @@ +ALTER TABLE ${ohdsiSchema}.cohort_generation_info ADD COLUMN is_canceled BOOLEAN NOT NULL DEFAULT FALSE; + +ALTER TABLE ${ohdsiSchema}.concept_set_generation_info ADD COLUMN is_canceled BOOLEAN NOT NULL DEFAULT FALSE; + +ALTER TABLE ${ohdsiSchema}.feas_study_generation_info ADD COLUMN is_canceled BOOLEAN NOT NULL DEFAULT FALSE; + +ALTER TABLE ${ohdsiSchema}.ir_execution ADD COLUMN is_canceled BOOLEAN NOT NULL DEFAULT FALSE; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20181010185036__schema-user-import-scheduler.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20181010185036__schema-user-import-scheduler.sql new file mode 100644 index 0000000000..75150d2109 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20181010185036__schema-user-import-scheduler.sql @@ -0,0 +1,44 @@ +CREATE SEQUENCE ${ohdsiSchema}.user_import_job_seq; + +CREATE TABLE ${ohdsiSchema}.user_import_job( + id BIGINT NOT NULL DEFAULT nextval('${ohdsiSchema}.user_import_job_seq'), + is_enabled BOOLEAN NOT NULL DEFAULT FALSE, + start_date TIMESTAMP WITH TIME ZONE, + frequency VARCHAR NOT NULL, + recurring_times INTEGER NOT NULL, + recurring_until_date TIMESTAMP WITH TIME ZONE, + cron VARCHAR NOT NULL, + last_executed_at TIMESTAMP WITH TIME ZONE, + executed_times INTEGER DEFAULT 0 NOT NULL, + is_closed BOOLEAN DEFAULT FALSE NOT NULL, + provider_type VARCHAR NOT NULL, + preserve_roles BOOLEAN NOT NULL DEFAULT TRUE, + CONSTRAINT pk_user_import_job PRIMARY KEY(id) +); + +CREATE TABLE ${ohdsiSchema}.user_import_job_weekdays( + user_import_job_id BIGINT NOT NULL, + day_of_week VARCHAR NOT NULL, + CONSTRAINT pk_user_import_job_weekdays PRIMARY KEY(user_import_job_id, day_of_week) +); + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'user:import:job:get', 'List user import jobs'), + 
(NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'user:import:job:post', 'Create new user import job'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'user:import:job:*:put', 'Update user import job'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'user:import:job:*:get', 'Get user import job'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'user:import:job:*:delete', 'Delete user import job'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'user:import:job:*:history:get', 'Get user import history'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr + WHERE sp.value IN ( + 'user:import:job:get', + 'user:import:job:post', + 'user:import:job:*:put', + 'user:import:job:*:get', + 'user:import:job:*:delete', + 'user:import:job:*:history:get') + AND sr.name IN ('admin'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20181010185037__schema-user-import-scheduler-history.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20181010185037__schema-user-import-scheduler-history.sql new file mode 100644 index 0000000000..5a8bd7de78 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20181010185037__schema-user-import-scheduler-history.sql @@ -0,0 +1,23 @@ +CREATE OR REPLACE VIEW ${ohdsiSchema}.user_import_job_history + AS + SELECT + job.job_execution_id as id, + job.start_time as start_time, + job.end_time as end_time, + job.status as status, + job.exit_code as exit_code, + job.exit_message as exit_message, + name_param.STRING_VAL as job_name, + provider_param.STRING_VAL as provider_type, + author_param.STRING_VAL as author + FROM + ${ohdsiSchema}.BATCH_JOB_EXECUTION job + JOIN ${ohdsiSchema}.BATCH_JOB_INSTANCE instance ON instance.JOB_INSTANCE_ID = job.JOB_INSTANCE_ID + JOIN ${ohdsiSchema}.batch_job_execution_params name_param + ON job.job_execution_id = name_param.job_execution_id AND name_param.KEY_NAME = 'jobName' + JOIN ${ohdsiSchema}.BATCH_JOB_EXECUTION_PARAMS provider_param + ON job.JOB_EXECUTION_ID = provider_param.JOB_EXECUTION_ID AND provider_param.KEY_NAME = 'provider' + JOIN ${ohdsiSchema}.BATCH_JOB_EXECUTION_PARAMS author_param + ON job.JOB_EXECUTION_ID = author_param.JOB_EXECUTION_ID AND author_param.KEY_NAME = 'jobAuthor' + WHERE + instance.JOB_NAME = 'usersImport'; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20181015182101__role-group-mapping.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20181015182101__role-group-mapping.sql new file mode 100644 index 0000000000..839aa5b846 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20181015182101__role-group-mapping.sql @@ -0,0 +1,10 @@ +ALTER TABLE ${ohdsiSchema}.sec_role_group ADD job_id BIGINT; + +ALTER TABLE ${ohdsiSchema}.sec_role_group + ADD CONSTRAINT fk_role_group_job FOREIGN KEY(job_id) + REFERENCES ${ohdsiSchema}.user_import_job(id) ON UPDATE NO ACTION ON DELETE CASCADE; + +ALTER TABLE ${ohdsiSchema}.sec_role_group DROP CONSTRAINT sec_role_group_provider_group_dn_role_id_key; + +ALTER TABLE ${ohdsiSchema}.sec_role_group + ADD CONSTRAINT UC_PROVIDER_GROUP_ROLE UNIQUE(provider, group_dn, role_id, job_id); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20181016171200__add_last_viewed_notifications_time.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20181016171200__add_last_viewed_notifications_time.sql new 
file mode 100644 index 0000000000..6e595e3fc7 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20181016171200__add_last_viewed_notifications_time.sql @@ -0,0 +1,18 @@ +ALTER TABLE ${ohdsiSchema}.sec_user ADD last_viewed_notifications_time TIMESTAMP WITH TIME ZONE; + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'notifications:viewed:post', 'Remember last viewed notification timestamp'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'notifications:viewed:get', 'Get last viewed notification timestamp'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'notifications:get', 'Get notifications'); + + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr + WHERE sp.value IN ( + 'notifications:viewed:post', + 'notifications:viewed:get', + 'notifications:get' + ) + AND sr.name IN ('Atlas users'); diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20181024150353__separate_system_personal_roles.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20181024150353__separate_system_personal_roles.sql new file mode 100644 index 0000000000..b9d4ee8d9f --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20181024150353__separate_system_personal_roles.sql @@ -0,0 +1,8 @@ +ALTER TABLE ${ohdsiSchema}.sec_role ADD system_role BOOLEAN DEFAULT(FALSE) NOT NULL; + +ALTER TABLE ${ohdsiSchema}.sec_role DROP CONSTRAINT sec_role_name_uq; + +UPDATE ${ohdsiSchema}.sec_role SET system_role = TRUE + WHERE NOT EXISTS(SELECT * FROM ${ohdsiSchema}.sec_user WHERE "login" = sec_role.name); + +ALTER TABLE ${ohdsiSchema}.sec_role ADD CONSTRAINT sec_role_name_uq UNIQUE (name, system_role); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20181107165252__fe_criteria_type.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20181107165252__fe_criteria_type.sql new file mode 100644 index 0000000000..ec23411047 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20181107165252__fe_criteria_type.sql @@ -0,0 +1,7 @@ +ALTER TABLE ${ohdsiSchema}.fe_analysis_criteria ADD criteria_type VARCHAR; + +UPDATE fe_analysis_criteria + SET criteria_type = CASE WHEN fa.stat_type = 'PREVALENCE' THEN 'CRITERIA_GROUP' + WHEN fa.stat_type = 'DISTRIBUTION' THEN 'WINDOWED_CRITERIA' END +FROM ${ohdsiSchema}.fe_analysis fa +WHERE fa.id = fe_analysis_criteria.fe_analysis_id; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.6.0.20181128150100__add_missing_pk.sql b/src/main/resources/db/migration/postgresql/V2.6.0.20181128150100__add_missing_pk.sql new file mode 100644 index 0000000000..b88d9f8ae0 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.6.0.20181128150100__add_missing_pk.sql @@ -0,0 +1,45 @@ +SELECT DISTINCT cohort_characterization_id, cohort_id +INTO ${ohdsiSchema}.cc_cohort_tmp +FROM ${ohdsiSchema}.cc_cohort; + +DELETE FROM ${ohdsiSchema}.cc_cohort; + +INSERT INTO ${ohdsiSchema}.cc_cohort (cohort_characterization_id, cohort_id) +SELECT cohort_characterization_id, cohort_id +FROM ${ohdsiSchema}.cc_cohort_tmp; + +TRUNCATE TABLE ${ohdsiSchema}.cc_cohort_tmp; +DROP TABLE ${ohdsiSchema}.cc_cohort_tmp; + + +ALTER TABLE ${ohdsiSchema}.analysis_execution ADD PRIMARY KEY (id); +ALTER TABLE ${ohdsiSchema}.analysis_generation_info ADD PRIMARY KEY (job_execution_id); +ALTER TABLE 
${ohdsiSchema}.cc_analysis ADD PRIMARY KEY (cohort_characterization_id, fe_analysis_id); +ALTER TABLE ${ohdsiSchema}.cc_cohort ADD PRIMARY KEY (cohort_characterization_id, cohort_id); +ALTER TABLE ${ohdsiSchema}.cca_execution_ext ADD PRIMARY KEY (cca_execution_id); +ALTER TABLE ${ohdsiSchema}.cohort ADD PRIMARY KEY (cohort_definition_id, subject_id); +ALTER TABLE ${ohdsiSchema}.cohort_analysis_list_xref ADD PRIMARY KEY (source_id, cohort_id, analysis_id); +ALTER TABLE ${ohdsiSchema}.cohort_concept_map ADD PRIMARY KEY (cohort_definition_id); +ALTER TABLE ${ohdsiSchema}.cohort_inclusion ADD PRIMARY KEY (cohort_definition_id); +ALTER TABLE ${ohdsiSchema}.cohort_inclusion_result ADD PRIMARY KEY (cohort_definition_id); +ALTER TABLE ${ohdsiSchema}.cohort_inclusion_stats ADD PRIMARY KEY (cohort_definition_id); +ALTER TABLE ${ohdsiSchema}.cohort_summary_stats ADD PRIMARY KEY (cohort_definition_id); +ALTER TABLE ${ohdsiSchema}.feas_study_inclusion_stats ADD PRIMARY KEY (study_id); +ALTER TABLE ${ohdsiSchema}.feas_study_index_stats ADD PRIMARY KEY (study_id); +ALTER TABLE ${ohdsiSchema}.feas_study_result ADD PRIMARY KEY (study_id); +ALTER TABLE ${ohdsiSchema}.feasibility_inclusion ADD PRIMARY KEY (study_id, sequence); +ALTER TABLE ${ohdsiSchema}.heracles_analysis ADD PRIMARY KEY (analysis_id); +ALTER TABLE ${ohdsiSchema}.penelope_laertes_universe ADD PRIMARY KEY (id); + +ALTER TABLE ${ohdsiSchema}.cohort_features ADD COLUMN id SERIAL PRIMARY KEY; +ALTER TABLE ${ohdsiSchema}.cohort_features_analysis_ref ADD COLUMN id SERIAL PRIMARY KEY; +ALTER TABLE ${ohdsiSchema}.cohort_features_dist ADD COLUMN id SERIAL PRIMARY KEY; +ALTER TABLE ${ohdsiSchema}.cohort_features_ref ADD COLUMN id SERIAL PRIMARY KEY; +ALTER TABLE ${ohdsiSchema}.heracles_heel_results ADD COLUMN id SERIAL PRIMARY KEY; +ALTER TABLE ${ohdsiSchema}.heracles_results ADD COLUMN id SERIAL PRIMARY KEY; +ALTER TABLE ${ohdsiSchema}.heracles_results_dist ADD COLUMN id SERIAL PRIMARY KEY; +ALTER TABLE ${ohdsiSchema}.ir_analysis_dist ADD COLUMN id SERIAL PRIMARY KEY; +ALTER TABLE ${ohdsiSchema}.ir_analysis_result ADD COLUMN id SERIAL PRIMARY KEY; +ALTER TABLE ${ohdsiSchema}.ir_analysis_strata_stats ADD COLUMN id SERIAL PRIMARY KEY; +ALTER TABLE ${ohdsiSchema}.ir_strata ADD COLUMN id SERIAL PRIMARY KEY; +ALTER TABLE ${ohdsiSchema}.penelope_laertes_uni_pivot ADD COLUMN id SERIAL PRIMARY KEY; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20181119162154__cc_strata.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20181119162154__cc_strata.sql new file mode 100644 index 0000000000..601655b861 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20181119162154__cc_strata.sql @@ -0,0 +1,29 @@ +CREATE SEQUENCE ${ohdsiSchema}.cc_strata_seq; + +CREATE TABLE ${ohdsiSchema}.cc_strata( + id BIGINT NOT NULL DEFAULT nextval('${ohdsiSchema}.cc_strata_seq'), + cohort_characterization_id BIGINT NOT NULL, + name VARCHAR NOT NULL, + expression VARCHAR, + CONSTRAINT pk_cc_strata_id PRIMARY KEY(id) +); + +ALTER TABLE ${ohdsiSchema}.cc_strata + ADD CONSTRAINT fk_cc_strata_cc FOREIGN KEY (cohort_characterization_id) + REFERENCES ${ohdsiSchema}.cohort_characterization(id) ON UPDATE NO ACTION ON DELETE CASCADE; + +CREATE SEQUENCE ${ohdsiSchema}.cc_strata_conceptset_seq; + +CREATE TABLE ${ohdsiSchema}.cc_strata_conceptset( + id BIGINT NOT NULL DEFAULT nextval('${ohdsiSchema}.cc_strata_conceptset_seq'), + cohort_characterization_id BIGINT NOT NULL, + expression VARCHAR, + CONSTRAINT pk_cc_strata_conceptset_id 
PRIMARY KEY(id) +); + +ALTER TABLE ${ohdsiSchema}.cc_strata_conceptset + ADD CONSTRAINT fk_cc_strata_conceptset_cc FOREIGN KEY (cohort_characterization_id) + REFERENCES ${ohdsiSchema}.cohort_characterization(id) ON UPDATE NO ACTION ON DELETE CASCADE; + +ALTER TABLE ${ohdsiSchema}.cohort_characterization ADD stratified_by VARCHAR; +ALTER TABLE ${ohdsiSchema}.cohort_characterization ADD strata_only BOOLEAN DEFAULT FALSE; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190116183005__default_stat_type.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190116183005__default_stat_type.sql new file mode 100644 index 0000000000..935769589d --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190116183005__default_stat_type.sql @@ -0,0 +1,6 @@ +ALTER TABLE ${ohdsiSchema}.fe_analysis + ALTER COLUMN stat_type SET DEFAULT 'PREVALENCE'; + +UPDATE ${ohdsiSchema}.fe_analysis +SET stat_type = 'PREVALENCE' +WHERE stat_type ISNULL; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190125113000__fe-analysis-created-modified.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190125113000__fe-analysis-created-modified.sql new file mode 100644 index 0000000000..8ae522e13c --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190125113000__fe-analysis-created-modified.sql @@ -0,0 +1,5 @@ +ALTER TABLE ${ohdsiSchema}.fe_analysis_criteria + ADD COLUMN created_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id), + ADD COLUMN created_date TIMESTAMP, + ADD COLUMN modified_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id), + ADD COLUMN modified_date TIMESTAMP; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190125141500__fe-analysis-criteria_stat-type-fix.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190125141500__fe-analysis-criteria_stat-type-fix.sql new file mode 100644 index 0000000000..2f882dbf3f --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190125141500__fe-analysis-criteria_stat-type-fix.sql @@ -0,0 +1,19 @@ +ALTER TABLE ${ohdsiSchema}.fe_analysis + ALTER COLUMN stat_type SET NOT NULL; + +UPDATE ${ohdsiSchema}.fe_analysis_criteria + SET criteria_type = + CASE WHEN criteria_type IS NULL THEN + CASE + WHEN fa.stat_type = 'PREVALENCE' THEN 'CRITERIA_GROUP' + WHEN fa.stat_type = 'DISTRIBUTION' THEN + CASE WHEN expression LIKE '{"Criteria":%' + THEN 'WINDOWED_CRITERIA' + ELSE 'DEMOGRAPHIC_CRITERIA' + END + END + ELSE + criteria_type + END +FROM ${ohdsiSchema}.fe_analysis fa +WHERE fa.id = fe_analysis_criteria.fe_analysis_id; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190128134827__create_absent_sequences.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190128134827__create_absent_sequences.sql new file mode 100644 index 0000000000..51b6201d76 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190128134827__create_absent_sequences.sql @@ -0,0 +1,13 @@ +CREATE SEQUENCE ${ohdsiSchema}.analysis_execution_sequence; +SELECT setval('${ohdsiSchema}.analysis_execution_sequence', coalesce(max(id), 1)) FROM ${ohdsiSchema}.analysis_execution; +ALTER TABLE ${ohdsiSchema}.analysis_execution ALTER COLUMN id SET DEFAULT nextval('${ohdsiSchema}.analysis_execution_sequence'); +-- Delete old sequence +DROP SEQUENCE IF EXISTS ${ohdsiSchema}.analysis_execution_id_seq; + +CREATE SEQUENCE ${ohdsiSchema}.cca_execution_sequence; +SELECT 
setval('${ohdsiSchema}.cca_execution_sequence', coalesce(max(cca_execution_id), 1)) FROM ${ohdsiSchema}.cca_execution;
+ALTER TABLE ${ohdsiSchema}.cca_execution ALTER COLUMN cca_execution_id SET DEFAULT nextval('${ohdsiSchema}.cca_execution_sequence');
+
+CREATE SEQUENCE ${ohdsiSchema}.heracles_visualization_data_sequence;
+SELECT setval('${ohdsiSchema}.heracles_visualization_data_sequence', coalesce(max(id), 1)) FROM ${ohdsiSchema}.HERACLES_VISUALIZATION_DATA;
+ALTER TABLE ${ohdsiSchema}.HERACLES_VISUALIZATION_DATA ALTER COLUMN id SET DEFAULT nextval('${ohdsiSchema}.heracles_visualization_data_sequence');
diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190129083000__fe-analysis-created-modified-fix.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190129083000__fe-analysis-created-modified-fix.sql
new file mode 100644
index 0000000000..37387e292e
--- /dev/null
+++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190129083000__fe-analysis-created-modified-fix.sql
@@ -0,0 +1,10 @@
+ALTER TABLE ${ohdsiSchema}.fe_analysis_criteria DROP COLUMN created_by_id;
+ALTER TABLE ${ohdsiSchema}.fe_analysis_criteria DROP COLUMN created_date;
+ALTER TABLE ${ohdsiSchema}.fe_analysis_criteria DROP COLUMN modified_by_id;
+ALTER TABLE ${ohdsiSchema}.fe_analysis_criteria DROP COLUMN modified_date;
+
+ALTER TABLE ${ohdsiSchema}.fe_analysis
+  ADD COLUMN created_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id),
+  ADD COLUMN created_date TIMESTAMP,
+  ADD COLUMN modified_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id),
+  ADD COLUMN modified_date TIMESTAMP;
\ No newline at end of file
diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190201090000__bjep_idx_and_cleanup.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190201090000__bjep_idx_and_cleanup.sql
new file mode 100644
index 0000000000..aa78164dc1
--- /dev/null
+++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190201090000__bjep_idx_and_cleanup.sql
@@ -0,0 +1 @@
+CREATE INDEX BJEP_JOB_STRING_IDX ON ${ohdsiSchema}.BATCH_JOB_EXECUTION_PARAMS (JOB_EXECUTION_ID,STRING_VAL);
\ No newline at end of file
diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190204153542__cc-and-pathway-cancel-job-permissions.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190204153542__cc-and-pathway-cancel-job-permissions.sql
new file mode 100644
index 0000000000..47f528fcb2
--- /dev/null
+++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190204153542__cc-and-pathway-cancel-job-permissions.sql
@@ -0,0 +1,31 @@
+-- cohort-characterizations permissions
+
+INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description)
+  SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), REPLACE(value, ':post', ':delete'),
+    'Cancel Generation of Cohort Characterization with ID = ' || REPLACE(REPLACE(value, 'cohort-characterization:', ''), ':generation:*:post', '')
+  FROM ${ohdsiSchema}.sec_permission sp
+  JOIN ${ohdsiSchema}.SEC_ROLE_PERMISSION srp on srp.PERMISSION_ID = sp.ID
+  WHERE sp.VALUE like 'cohort-characterization:%:generation:*:post' AND NOT sp.value = 'cohort-characterization:*:generation:*:post';
+
+INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id)
+  select srp.role_id, spc.id as permission_id from ${ohdsiSchema}.SEC_PERMISSION sp
+  join ${ohdsiSchema}.SEC_ROLE_PERMISSION srp on srp.PERMISSION_ID = sp.ID
+  join ${ohdsiSchema}.SEC_PERMISSION spc ON replace(replace(sp.value, 'cohort-characterization:', ''),':generation:*:post', '') = replace(replace(spc.value, 
'cohort-characterization:', ''),':generation:*:delete', '') + and spc.value like 'cohort-characterization:%:generation:*:delete' + where sp.VALUE like 'cohort-characterization:%:generation:*:post'; + +-- pathways permissions + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) + SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), REPLACE(value, ':post', ':delete'), + 'Cancel Generation of Pathway Analysis with ID = ' || REPLACE(REPLACE(value, 'pathway-analysis:', ''), ':generation:*:post', '') +FROM ${ohdsiSchema}.sec_permission sp +JOIN ${ohdsiSchema}.SEC_ROLE_PERMISSION srp on srp.PERMISSION_ID = sp.ID +WHERE sp.VALUE like 'pathway-analysis:%:generation:*:post' AND NOT sp.value = 'pathway-analysis:*:generation:*:post'; + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) + select srp.role_id, spc.id as permission_id from ${ohdsiSchema}.SEC_PERMISSION sp + join ${ohdsiSchema}.SEC_ROLE_PERMISSION srp on srp.PERMISSION_ID = sp.ID + join ${ohdsiSchema}.SEC_PERMISSION spc ON replace(replace(sp.value, 'pathway-analysis:', ''),':generation:*:post', '') = replace(replace(spc.value, 'pathway-analysis:', ''),':generation:*:delete', '') + and spc.value like 'pathway-analysis:%:generation:*:delete' + where sp.VALUE like 'pathway-analysis:%:generation:*:post'; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190204183006__ir-cancel-job-permission.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190204183006__ir-cancel-job-permission.sql new file mode 100644 index 0000000000..4def8b4596 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190204183006__ir-cancel-job-permission.sql @@ -0,0 +1,10 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) + VALUES (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'ir:*:execute:*:delete', 'Cancel IR analysis execution'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr + WHERE sp.value IN ( + 'ir:*:execute:*:delete' + ) + AND sr.name IN ('Atlas users'); diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190205174343__cc-pathway-copy-permissions.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190205174343__cc-pathway-copy-permissions.sql new file mode 100644 index 0000000000..e7804b17d9 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190205174343__cc-pathway-copy-permissions.sql @@ -0,0 +1,23 @@ +-- cc copy permissions + +INSERT INTO ${ohdsiSchema}.SEC_PERMISSION(id, value, description) + VALUES (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:post', ''); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr + WHERE sp.value IN ( + 'cohort-characterization:*:post' + ) AND sr.name IN ('Atlas users'); + +-- pathway copy permissions + +INSERT INTO ${ohdsiSchema}.SEC_PERMISSION(id, value, description) +VALUES (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:*:post', ''); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr + WHERE sp.value IN ( + 'pathway-analysis:*:post' + ) AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git 
a/src/main/resources/db/migration/postgresql/V2.7.0.20190208164736__analysis_execution-add-job_id.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190208164736__analysis_execution-add-job_id.sql new file mode 100644 index 0000000000..cfa177a62c --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190208164736__analysis_execution-add-job_id.sql @@ -0,0 +1,11 @@ +ALTER TABLE ${ohdsiSchema}.analysis_execution ADD job_execution_id BIGINT; + +alter table ${ohdsiSchema}.analysis_execution drop column analysis_id; +ALTER TABLE ${ohdsiSchema}.analysis_execution DROP COLUMN analysis_type; +alter table ${ohdsiSchema}.analysis_execution drop column duration; +alter table ${ohdsiSchema}.analysis_execution drop column executed; +alter table ${ohdsiSchema}.analysis_execution drop column sec_user_id; +alter table ${ohdsiSchema}.analysis_execution drop column source_id; +alter table ${ohdsiSchema}.analysis_execution drop column update_password; + +alter table ${ohdsiSchema}.analysis_execution rename to ee_analysis_status; diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190211181105__prediction-estimation-generation-permissions.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190211181105__prediction-estimation-generation-permissions.sql new file mode 100644 index 0000000000..28240a9d38 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190211181105__prediction-estimation-generation-permissions.sql @@ -0,0 +1,23 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, "value", "description") + VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'prediction:*:generation:*:post', 'Execute Prediction Generation Job'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'prediction:*:generation:get', 'View Prediction Generations'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'prediction:generation:*:result:get', 'View Prediction Generation Results'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr + WHERE sp."value" in ('prediction:*:generation:*:post', 'prediction:*:generation:get', 'prediction:generation:*:result:get') + AND sr.name IN ('Atlas users'); + +INSERT INTO ${ohdsiSchema}.sec_permission(id, "value", "description") + VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'estimation:*:generation:*:post', 'Execute Estimation Generation Job'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'estimation:*:generation:get', 'View Estimation Generations'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'estimation:generation:*:result:get', 'View Estimation Generation Results'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr + WHERE sp."value" in ('estimation:*:generation:*:post', 'estimation:*:generation:get', 'estimation:generation:*:result:get') + AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190211182000__permissions-fixes.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190211182000__permissions-fixes.sql new file mode 100644 index 0000000000..3805241641 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190211182000__permissions-fixes.sql @@ -0,0 +1,73 @@ +CREATE TEMP TABLE temp_migration ( + from_perm_id int, + new_value character varying(255) +); + +INSERT INTO 
temp_migration (from_perm_id, new_value) +SELECT sp.id as from_id, + REPLACE(CAST(new_perms.val AS VARCHAR(255)), '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')) as new_value +FROM ${ohdsiSchema}.sec_permission sp +CROSS JOIN ( + SELECT 'vocabulary:%s:*:get' val + UNION ALL + SELECT 'vocabulary:%s:included-concepts:count:post' + UNION ALL + SELECT 'vocabulary:%s:resolveConceptSetExpression:post' + UNION ALL + SELECT 'vocabulary:%s:lookup:identifiers:post' + UNION ALL + SELECT 'vocabulary:%s:lookup:identifiers:ancestors:post' + UNION ALL + SELECT 'vocabulary:%s:lookup:mapped:post' + UNION ALL + SELECT 'vocabulary:%s:compare:post' + UNION ALL + SELECT 'vocabulary:%s:optimize:post' + UNION ALL + SELECT 'cdmresults:%s:*:get' + UNION ALL + SELECT 'cdmresults:%s:*:*:get' + UNION ALL + SELECT 'cdmresults:%s:conceptRecordCount:post' + UNION ALL + SELECT 'cohortresults:%s:*:*:get' + UNION ALL + SELECT 'cohortresults:%s:*:*:*:get' +) new_perms +WHERE sp.value LIKE 'source:%:access'; + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), new_value +FROM temp_migration; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (id,role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), + srp.role_id, + sp.id as permission_id +FROM temp_migration m +JOIN ${ohdsiSchema}.sec_permission sp on m.new_value = sp.value +JOIN ${ohdsiSchema}.sec_role_permission srp on m.from_perm_id = srp.permission_id; + +drop table temp_migration; + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) + VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:*:expression:get', 'Resolve concept set expression'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:*:generationinfo:get', 'Get generation info for concept set'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:*:check:get', 'Get cohort definition design checks'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'sqlrender:translate:post' , 'Translate SQL'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'ir:*:info' , 'Get IR info'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'job:type:*:name:*:get' , 'Get IR info'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'conceptset:*:expression:get', + 'conceptset:*:generationinfo:get', + 'cohortdefinition:*:check:get', + 'sqlrender:translate:post', + 'ir:*:info', + 'job:type:*:name:*:get' +) +AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190212154939__analysis_execution_files.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190212154939__analysis_execution_files.sql new file mode 100644 index 0000000000..a9eb177664 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190212154939__analysis_execution_files.sql @@ -0,0 +1,13 @@ +ALTER TABLE ${ohdsiSchema}.output_files ADD media_type VARCHAR(255); + +ALTER TABLE ${ohdsiSchema}.output_files DROP CONSTRAINT fk_sif_cca_execution; +ALTER TABLE ${ohdsiSchema}.output_files DROP COLUMN cca_execution_id; + +ALTER TABLE ${ohdsiSchema}.input_files DROP CONSTRAINT fk_sof_cca_execution; +ALTER TABLE ${ohdsiSchema}.input_files DROP COLUMN cca_execution_id; + +--ALTER TABLE ${ohdsiSchema}.output_files ADD 
execution_id INT; +--ALTER TABLE ${ohdsiSchema}.input_files ADD execution_id INT; + +CREATE SEQUENCE ${ohdsiSchema}.output_file_seq; +CREATE SEQUENCE ${ohdsiSchema}.input_file_seq; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.201902130900__source-sequences.sql b/src/main/resources/db/migration/postgresql/V2.7.0.201902130900__source-sequences.sql new file mode 100644 index 0000000000..c71002d254 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.201902130900__source-sequences.sql @@ -0,0 +1,11 @@ +-- Source sequence +CREATE SEQUENCE ${ohdsiSchema}.source_sequence START WITH 1 INCREMENT BY 1 MAXVALUE 9223372036854775807 NO CYCLE; +SELECT setval('${ohdsiSchema}.source_sequence', coalesce(max(source_id), 1)) FROM ${ohdsiSchema}.source; + +ALTER TABLE ${ohdsiSchema}.source ALTER COLUMN source_id SET DEFAULT nextval('${ohdsiSchema}.source_sequence'); + +-- Source_daimon sequence +CREATE SEQUENCE ${ohdsiSchema}.source_daimon_sequence START WITH 1 INCREMENT BY 1 MAXVALUE 9223372036854775807 NO CYCLE; +SELECT setval('${ohdsiSchema}.source_daimon_sequence', coalesce(max(source_daimon_id), 1)) FROM ${ohdsiSchema}.source_daimon; + +ALTER TABLE ${ohdsiSchema}.source_daimon ALTER COLUMN source_daimon_id SET DEFAULT nextval('${ohdsiSchema}.source_daimon_sequence'); diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190213161124__add-fk-to-source-daimon.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190213161124__add-fk-to-source-daimon.sql new file mode 100644 index 0000000000..c4e75c2a6a --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190213161124__add-fk-to-source-daimon.sql @@ -0,0 +1,2 @@ +ALTER TABLE ${ohdsiSchema}.source_daimon + ADD CONSTRAINT FK_source_daimon_source_id FOREIGN KEY (source_id) REFERENCES ${ohdsiSchema}.source (source_id); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190214110000__permissions-fixes-2.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190214110000__permissions-fixes-2.sql new file mode 100644 index 0000000000..95a721bc21 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190214110000__permissions-fixes-2.sql @@ -0,0 +1,14 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:design:get', 'Get cohort characterization design'); +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:design:get', 'Get cohort characterization design list'); + + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr + WHERE sp.value IN ( + 'cohort-characterization:*:design:get', + 'cohort-characterization:design:get' + ) + AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190214145000__permissions-fixes-3.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190214145000__permissions-fixes-3.sql new file mode 100644 index 0000000000..5450d954fb --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190214145000__permissions-fixes-3.sql @@ -0,0 +1,41 @@ +CREATE TEMP TABLE temp_migration ( + from_perm_id int, + new_value character varying(255) +); + 
+INSERT INTO temp_migration (from_perm_id, new_value) +SELECT sp.id as from_id, + REPLACE(CAST(new_perms.val AS VARCHAR(255)), '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')) as new_value +FROM ${ohdsiSchema}.sec_permission sp + CROSS JOIN ( + SELECT 'vocabulary:%s:concept:*:get' val + UNION ALL + SELECT 'vocabulary:%s:concept:*:related:get' + UNION ALL + SELECT 'cohortdefinition:*:cancel:%s:get' + UNION ALL + SELECT 'featureextraction:query:prevalence:*:%s:get' + UNION ALL + SELECT 'featureextraction:query:distributions:*:%s:get' + UNION ALL + SELECT 'featureextraction:explore:prevalence:*:%s:*:get' + UNION ALL + SELECT 'featureextraction:generate:%s:*:get' + UNION ALL + SELECT 'featureextraction:generatesql:%s:*:get' + ) new_perms +WHERE sp.value LIKE 'source:%:access'; + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), new_value +FROM temp_migration; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (id,role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), + srp.role_id, + sp.id as permission_id +FROM temp_migration m +JOIN ${ohdsiSchema}.sec_permission sp on m.new_value = sp.value +JOIN ${ohdsiSchema}.sec_role_permission srp on m.from_perm_id = srp.permission_id; + +drop table temp_migration; diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190215113000__permissions-fixes-4.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190215113000__permissions-fixes-4.sql new file mode 100644 index 0000000000..8dac4b7f2f --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190215113000__permissions-fixes-4.sql @@ -0,0 +1,38 @@ +CREATE TEMP TABLE temp_migration ( + from_perm_id int, + new_value character varying(255) +); + +INSERT INTO temp_migration (from_perm_id, new_value) +SELECT sp.id as from_id, + REPLACE(CAST(new_perms.val AS VARCHAR(255)), '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')) as new_value +FROM ${ohdsiSchema}.sec_permission sp +CROSS JOIN ( + SELECT 'vocabulary:%s:search:post' val +) new_perms +WHERE sp.value LIKE 'source:%:access'; + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), new_value +FROM temp_migration; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (id,role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), + srp.role_id, + sp.id as permission_id +FROM temp_migration m +JOIN ${ohdsiSchema}.sec_permission sp on m.new_value = sp.value +JOIN ${ohdsiSchema}.sec_role_permission srp on m.from_perm_id = srp.permission_id; + +drop table temp_migration; + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) + VALUES (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:*:check:post', 'Fix cohort definition'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'cohortdefinition:*:check:post' +) +AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190220113500__permissions-fixes-ir-profile.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190220113500__permissions-fixes-ir-profile.sql new file mode 100644 index 0000000000..4dbb182a67 --- /dev/null +++ 
b/src/main/resources/db/migration/postgresql/V2.7.0.20190220113500__permissions-fixes-ir-profile.sql @@ -0,0 +1,48 @@ +DELETE FROM ${ohdsiSchema}.sec_role_permission +WHERE permission_id IN (SELECT id FROM ${ohdsiSchema}.sec_permission WHERE value NOT IN ('user:me:get')) +AND role_id = (SELECT id FROM ${ohdsiSchema}.sec_role WHERE name = 'public'); + +DELETE FROM ${ohdsiSchema}.sec_role_permission +WHERE permission_id IN (SELECT id FROM ${ohdsiSchema}.sec_permission WHERE value IN ('ir:*:execute:*:get', 'ir:*:execute:*:delete', '*:person:*:get')) +AND role_id = (SELECT id FROM ${ohdsiSchema}.sec_role WHERE name = 'Atlas users'); + +CREATE TEMP TABLE temp_migration ( + from_perm_id int, + new_value character varying(255) +); + +INSERT INTO temp_migration (from_perm_id, new_value) +SELECT sp.id as from_id, + REPLACE(CAST(new_perms.val AS VARCHAR(255)), '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')) as new_value +FROM ${ohdsiSchema}.sec_permission sp +CROSS JOIN ( + SELECT 'ir:*:execute:%s:get' val + UNION ALL + SELECT 'ir:*:execute:%s:delete' + UNION ALL + SELECT '%s:person:*:get' +) new_perms +WHERE sp.value LIKE 'source:%:access'; + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), new_value +FROM temp_migration; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (id,role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), + srp.role_id, + sp.id as permission_id +FROM temp_migration m +JOIN ${ohdsiSchema}.sec_permission sp on m.new_value = sp.value +JOIN ${ohdsiSchema}.sec_role_permission srp on m.from_perm_id = srp.permission_id; + +drop table temp_migration; + +-- Allow Atlas users to see list of sources +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'configuration:edit:ui' +) +AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190222113000__permissions-fixes-source-codes-import.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190222113000__permissions-fixes-source-codes-import.sql new file mode 100644 index 0000000000..78654307e7 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190222113000__permissions-fixes-source-codes-import.sql @@ -0,0 +1,27 @@ +CREATE TEMP TABLE temp_migration ( + from_perm_id int, + new_value character varying(255) +); + +INSERT INTO temp_migration (from_perm_id, new_value) +SELECT sp.id as from_id, + REPLACE(CAST(new_perms.val AS VARCHAR(255)), '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')) as new_value +FROM ${ohdsiSchema}.sec_permission sp +CROSS JOIN ( + SELECT 'vocabulary:%s:lookup:sourcecodes:post' val +) new_perms +WHERE sp.value LIKE 'source:%:access'; + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), new_value +FROM temp_migration; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (id,role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), + srp.role_id, + sp.id as permission_id +FROM temp_migration m +JOIN ${ohdsiSchema}.sec_permission sp on m.new_value = sp.value +JOIN ${ohdsiSchema}.sec_role_permission srp on m.from_perm_id = srp.permission_id; + +drop table temp_migration; diff --git 
a/src/main/resources/db/migration/postgresql/V2.7.0.20190222154724__permission-fixes-conceptsets.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190222154724__permission-fixes-conceptsets.sql new file mode 100644 index 0000000000..ba6a66e8aa --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190222154724__permission-fixes-conceptsets.sql @@ -0,0 +1,9 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:*:export:get', 'Export ConceptSet'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'conceptset:*:export:get' +) AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190225165203__plp_gen_view.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190225165203__plp_gen_view.sql new file mode 100644 index 0000000000..257c6df37d --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190225165203__plp_gen_view.sql @@ -0,0 +1,22 @@ +CREATE OR REPLACE VIEW ${ohdsiSchema}.prediction_analysis_generation as + SELECT + job.job_execution_id id, + job.create_time start_time, + job.end_time end_time, + job.status status, + job.exit_message exit_message, + CAST(plp_id_param.string_val AS INTEGER) prediction_id, + CAST(source_param.string_val AS INTEGER) source_id, + passwd_param.string_val update_password, + -- Generation info based + gen_info.design design, + gen_info.hash_code hash_code, + gen_info.created_by_id created_by_id, + -- Execution info based + exec_info.id analysis_execution_id + FROM ${ohdsiSchema}.batch_job_execution job + JOIN ${ohdsiSchema}.batch_job_execution_params plp_id_param ON job.job_execution_id = plp_id_param.job_execution_id AND plp_id_param.key_name = 'prediction_analysis_id' + JOIN ${ohdsiSchema}.batch_job_execution_params source_param ON job.job_execution_id = source_param.job_execution_id AND source_param.key_name = 'source_id' + JOIN ${ohdsiSchema}.batch_job_execution_params passwd_param ON job.job_execution_id = passwd_param.job_execution_id AND passwd_param.key_name = 'update_password' + LEFT JOIN ${ohdsiSchema}.ee_analysis_status exec_info ON job.job_execution_id = exec_info.job_execution_id + LEFT JOIN ${ohdsiSchema}.analysis_generation_info gen_info ON job.job_execution_id = gen_info.job_execution_id; diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190225165752__estimation_gen_view.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190225165752__estimation_gen_view.sql new file mode 100644 index 0000000000..c297443f80 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190225165752__estimation_gen_view.sql @@ -0,0 +1,22 @@ +CREATE OR REPLACE VIEW ${ohdsiSchema}.estimation_analysis_generation as + SELECT + job.job_execution_id id, + job.create_time start_time, + job.end_time end_time, + job.status status, + job.exit_message exit_message, + CAST(estimation_id_param.string_val AS INTEGER) estimation_id, + CAST(source_param.string_val AS INTEGER) source_id, + passwd_param.string_val update_password, + -- Generation info based + gen_info.design design, + gen_info.hash_code hash_code, + gen_info.created_by_id created_by_id, + -- Execution info based + exec_info.id analysis_execution_id + FROM ${ohdsiSchema}.batch_job_execution 
job + JOIN ${ohdsiSchema}.batch_job_execution_params estimation_id_param ON job.job_execution_id = estimation_id_param.job_execution_id AND estimation_id_param.key_name = 'estimation_analysis_id' + JOIN ${ohdsiSchema}.batch_job_execution_params source_param ON job.job_execution_id = source_param.job_execution_id AND source_param.key_name = 'source_id' + JOIN ${ohdsiSchema}.batch_job_execution_params passwd_param ON job.job_execution_id = passwd_param.job_execution_id AND passwd_param.key_name = 'update_password' + LEFT JOIN ${ohdsiSchema}.ee_analysis_status exec_info ON job.job_execution_id = exec_info.job_execution_id + LEFT JOIN ${ohdsiSchema}.analysis_generation_info gen_info ON job.job_execution_id = gen_info.job_execution_id; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190226160020__permissions-fixes-cohort-export-conceptsets.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190226160020__permissions-fixes-cohort-export-conceptsets.sql new file mode 100644 index 0000000000..b802bf007e --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190226160020__permissions-fixes-cohort-export-conceptsets.sql @@ -0,0 +1,9 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:*:export:conceptset:get', 'Export ConceptSet'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'cohortdefinition:*:export:conceptset:get' +) AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190228160000__permissions-fixes-cc-explore.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190228160000__permissions-fixes-cc-explore.sql new file mode 100644 index 0000000000..849fbae772 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190228160000__permissions-fixes-cc-explore.sql @@ -0,0 +1,9 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:generation:*:explore:prevalence:*:*:*:get', 'Explore covariate in Cohort Characterization'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'cohort-characterization:generation:*:explore:prevalence:*:*:*:get' +) AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190301130000__cc-unique-stratas.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190301130000__cc-unique-stratas.sql new file mode 100644 index 0000000000..19e0b8dbaa --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190301130000__cc-unique-stratas.sql @@ -0,0 +1 @@ +ALTER TABLE ${ohdsiSchema}.cc_strata ADD CONSTRAINT cc_strata_name_uq UNIQUE (cohort_characterization_id, name); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190304084500__plp-ple-import.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190304084500__plp-ple-import.sql new file mode 100644 index 0000000000..1fa5bb7f5a --- /dev/null +++ 
b/src/main/resources/db/migration/postgresql/V2.7.0.20190304084500__plp-ple-import.sql @@ -0,0 +1,11 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'prediction:import:post', 'Import PLP analyses'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'estimation:import:post', 'Import PLE analyses'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'prediction:import:post', + 'estimation:import:post' +) AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190304131519__standardize-permissions-cc.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190304131519__standardize-permissions-cc.sql new file mode 100644 index 0000000000..1d7b6ff3e2 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190304131519__standardize-permissions-cc.sql @@ -0,0 +1,37 @@ +delete from ${ohdsiSchema}.sec_role_permission where + permission_id in (select id from ${ohdsiSchema}.sec_permission where + value like 'cohort-characterization:%:generation:*:post' or + value like 'cohort-characterization:%:generation:*:delete' + ); +delete from ${ohdsiSchema}.sec_permission where + value like 'cohort-characterization:%:generation:*:post' or + value like 'cohort-characterization:%:generation:*:delete'; + +CREATE TEMP TABLE temp_migration ( + from_perm_id int, + new_value character varying(255) +); + +INSERT INTO temp_migration (from_perm_id, new_value) +SELECT sp.id as from_id, + REPLACE(CAST(new_perms.val AS VARCHAR(255)), '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')) as new_value +FROM ${ohdsiSchema}.sec_permission sp +CROSS JOIN ( + SELECT 'cohort-characterization:*:generation:%s:post' val UNION ALL + SELECT 'cohort-characterization:*:generation:%s:delete' val +) new_perms +WHERE sp.value LIKE 'source:%:access'; + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), new_value +FROM temp_migration; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (id,role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), + srp.role_id, + sp.id as permission_id +FROM temp_migration m +JOIN ${ohdsiSchema}.sec_permission sp on m.new_value = sp.value +JOIN ${ohdsiSchema}.sec_role_permission srp on m.from_perm_id = srp.permission_id; + +drop table temp_migration; diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190304160255__standardize-permissions-pathways.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190304160255__standardize-permissions-pathways.sql new file mode 100644 index 0000000000..fee3a7001b --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190304160255__standardize-permissions-pathways.sql @@ -0,0 +1,37 @@ +delete from ${ohdsiSchema}.sec_role_permission where + permission_id in (select id from ${ohdsiSchema}.sec_permission where + value like 'pathway-analysis:%:generation:*:post' or + value like 'pathway-analysis:%:generation:*:delete' + ); +delete from ${ohdsiSchema}.sec_permission where + value like 'pathway-analysis:%:generation:*:post' or + value like 'pathway-analysis:%:generation:*:delete'; + +CREATE TEMP TABLE temp_migration ( + from_perm_id int, + new_value character varying(255) +); + +INSERT INTO 
temp_migration (from_perm_id, new_value) +SELECT sp.id as from_id, + REPLACE(CAST(new_perms.val AS VARCHAR(255)), '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')) as new_value +FROM ${ohdsiSchema}.sec_permission sp +CROSS JOIN ( + SELECT 'pathway-analysis:*:generation:%s:post' val UNION ALL + SELECT 'pathway-analysis:*:generation:%s:delete' val +) new_perms +WHERE sp.value LIKE 'source:%:access'; + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), new_value +FROM temp_migration; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (id,role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), + srp.role_id, + sp.id as permission_id +FROM temp_migration m +JOIN ${ohdsiSchema}.sec_permission sp on m.new_value = sp.value +JOIN ${ohdsiSchema}.sec_role_permission srp on m.from_perm_id = srp.permission_id; + +drop table temp_migration; diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190304162609__standardize-permissions-ir.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190304162609__standardize-permissions-ir.sql new file mode 100644 index 0000000000..9a41775548 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190304162609__standardize-permissions-ir.sql @@ -0,0 +1,54 @@ +delete from ${ohdsiSchema}.sec_role_permission where + permission_id in (select id from ${ohdsiSchema}.sec_permission where value in ('ir:*:delete')) + AND role_id = (SELECT id FROM ${ohdsiSchema}.sec_role WHERE name = 'Atlas users'); + +delete from ${ohdsiSchema}.sec_role_permission where + permission_id in (select id from ${ohdsiSchema}.sec_permission where value in ('ir:*:info')); + +delete from ${ohdsiSchema}.sec_permission where + value in ('ir:*:info'); + +insert into ${ohdsiSchema}.sec_permission(id, value, description) + values + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'ir:*:info:get', 'Get IR info'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr + WHERE sp.value IN ( + 'ir:*:info:get' + ) + AND sr.name IN ('Atlas users'); + + +delete from ${ohdsiSchema}.sec_role_permission where + permission_id in (select id from ${ohdsiSchema}.sec_permission where value like 'ir:%:report:*:get'); +delete from ${ohdsiSchema}.sec_permission where value like 'ir:%:report:*:get'; + +CREATE TEMP TABLE temp_migration ( + from_perm_id int, + new_value character varying(255) +); + +INSERT INTO temp_migration (from_perm_id, new_value) +SELECT sp.id as from_id, + REPLACE(CAST(new_perms.val AS VARCHAR(255)), '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')) as new_value +FROM ${ohdsiSchema}.sec_permission sp +CROSS JOIN ( + SELECT 'ir:*:report:%s:get' val +) new_perms +WHERE sp.value LIKE 'source:%:access'; + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), new_value +FROM temp_migration; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (id,role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), + srp.role_id, + sp.id as permission_id +FROM temp_migration m +JOIN ${ohdsiSchema}.sec_permission sp on m.new_value = sp.value +JOIN ${ohdsiSchema}.sec_role_permission srp on m.from_perm_id = srp.permission_id; + +drop table temp_migration; diff --git 
a/src/main/resources/db/migration/postgresql/V2.7.0.20190304213000__standardize-permissions-cohorts.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190304213000__standardize-permissions-cohorts.sql new file mode 100644 index 0000000000..e8f6361f99 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190304213000__standardize-permissions-cohorts.sql @@ -0,0 +1,10 @@ +delete from ${ohdsiSchema}.sec_role_permission where + permission_id in (select id from ${ohdsiSchema}.sec_permission where value in ('cohortdefinition:*:put', 'cohortdefinition:*:delete')) + AND role_id = (SELECT id FROM ${ohdsiSchema}.sec_role WHERE name = 'Atlas users'); + +delete from ${ohdsiSchema}.sec_role_permission where + permission_id in (select id from ${ohdsiSchema}.sec_permission where value in ('cohortdefinition:delete')); + +-- Dummy permission: there is no such endpoint +delete from ${ohdsiSchema}.sec_permission where + value in ('cohortdefinition:delete'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190304220000__standardize-permissions-conceptsets.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190304220000__standardize-permissions-conceptsets.sql new file mode 100644 index 0000000000..b3103c7982 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190304220000__standardize-permissions-conceptsets.sql @@ -0,0 +1,3 @@ +delete from ${ohdsiSchema}.sec_role_permission where + permission_id in (select id from ${ohdsiSchema}.sec_permission where value in ('conceptset:*:delete', 'conceptset:*:put', 'conceptset:*:items:put')) + AND role_id = (SELECT id FROM ${ohdsiSchema}.sec_role WHERE name = 'Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190304220500__role-moderator.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190304220500__role-moderator.sql new file mode 100644 index 0000000000..705eec371d --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190304220500__role-moderator.sql @@ -0,0 +1,36 @@ +INSERT INTO ${ohdsiSchema}.sec_role(id, name, system_role) VALUES + (nextval('${ohdsiSchema}.sec_role_sequence'), 'Moderator', TRUE); + +insert into ${ohdsiSchema}.sec_permission(id, value, description) +values + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:put', 'Edit any Cohort Characterization'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:delete', 'Delete any Cohort Characterization'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:*:put', 'Edit any Pathways analysis'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:*:delete', 'Delete any Pathways analysis'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'ir:*:put', 'Edit any IR analysis'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'ir:*:info:*:delete', 'Delete any IR analysis'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'estimation:*:put', 'Edit any Estimation analysis'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'estimation:*:delete', 'Delete any Estimation analysis'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'prediction:*:put', 'Edit any Prediction analysis'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'prediction:*:delete', 'Delete any Prediction analysis'); + +-- 16 perms +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT 
nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + -- All Concept Sets edit and delete + 'conceptset:*:delete', 'conceptset:*:put', 'conceptset:*:items:put', + -- All Cohort Definitions edit and delete + 'cohortdefinition:*:put', 'cohortdefinition:*:delete', + -- All CC edit and delete + 'cohort-characterization:*:put', 'cohort-characterization:*:delete', + -- All Pathways edit and delete + 'pathway-analysis:*:put', 'pathway-analysis:*:delete', + -- All IRs edit and delete + 'ir:*:put', 'ir:*:delete', 'ir:*:info:*:delete', + -- All Estimation analyses edit and delete + 'estimation:*:put', 'estimation:*:delete', + -- All Prediction analyses edit and delete + 'prediction:*:put', 'prediction:*:delete' +) AND sr.name IN ('Moderator'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190305123620__ir-executioninfo-permissions.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190305123620__ir-executioninfo-permissions.sql new file mode 100644 index 0000000000..8c8c411d29 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190305123620__ir-executioninfo-permissions.sql @@ -0,0 +1,27 @@ +CREATE TEMP TABLE temp_migration ( + from_perm_id int, + new_value character varying(255) +); + +INSERT INTO temp_migration (from_perm_id, new_value) +SELECT sp.id as from_id, + REPLACE(CAST(new_perms.val AS VARCHAR(255)), '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')) as new_value +FROM ${ohdsiSchema}.sec_permission sp +CROSS JOIN ( + SELECT 'ir:*:info:%s:get' val +) new_perms +WHERE sp.value LIKE 'source:%:access'; + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), new_value +FROM temp_migration; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (id,role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), + srp.role_id, + sp.id as permission_id +FROM temp_migration m +JOIN ${ohdsiSchema}.sec_permission sp on m.new_value = sp.value +JOIN ${ohdsiSchema}.sec_role_permission srp on m.from_perm_id = srp.permission_id; + +drop table temp_migration; diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190306094500__sources-endpoint-permission.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190306094500__sources-endpoint-permission.sql new file mode 100644 index 0000000000..fdce39a2c7 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190306094500__sources-endpoint-permission.sql @@ -0,0 +1,5 @@ +INSERT INTO ${ohdsiSchema}.sec_role_permission (id, permission_id, role_id) +SELECT + nextval('${ohdsiSchema}.sec_role_permission_sequence'), + (select id from ${ohdsiSchema}.sec_permission where value in ('source:*:get')) permission_id, + (SELECT id FROM ${ohdsiSchema}.sec_role WHERE name = 'public') role_id; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190306154500__rename_heracles_seq.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190306154500__rename_heracles_seq.sql new file mode 100644 index 0000000000..0d55024ca6 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190306154500__rename_heracles_seq.sql @@ -0,0 +1 @@ +ALTER SEQUENCE ${ohdsiSchema}.heracles_visualization_data_sequence RENAME TO heracles_vis_data_sequence; \ No newline at end of file diff --git 
a/src/main/resources/db/migration/postgresql/V2.7.0.20190311152238__permissions-fixes-ir-sql.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190311152238__permissions-fixes-ir-sql.sql new file mode 100644 index 0000000000..715f29b0fb --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190311152238__permissions-fixes-ir-sql.sql @@ -0,0 +1,9 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'ir:sql:post', 'Generate SQL from Incidence Rates expression'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'ir:sql:post' +) AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190311182048__fix_vocab_search_permissions.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190311182048__fix_vocab_search_permissions.sql new file mode 100644 index 0000000000..393d194d45 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190311182048__fix_vocab_search_permissions.sql @@ -0,0 +1,33 @@ +delete from ${ohdsiSchema}.sec_role_permission + where permission_id in (select id from ${ohdsiSchema}.sec_permission where value = 'vocabulary:*:search:*:get'); + +delete from ${ohdsiSchema}.sec_permission + where value = 'vocabulary:*:search:*:get'; + +CREATE TEMP TABLE temp_migration ( + from_perm_id int, + new_value character varying(255) +); + +INSERT INTO temp_migration (from_perm_id, new_value) +SELECT sp.id as from_id, + REPLACE(CAST(new_perms.val AS VARCHAR(255)), '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')) as new_value +FROM ${ohdsiSchema}.sec_permission sp +CROSS JOIN ( + SELECT 'vocabulary:%s:search:*:get' val +) new_perms +WHERE sp.value LIKE 'source:%:access'; + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), new_value +FROM temp_migration; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (id,role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), + srp.role_id, + sp.id as permission_id +FROM temp_migration m +JOIN ${ohdsiSchema}.sec_permission sp on m.new_value = sp.value +JOIN ${ohdsiSchema}.sec_role_permission srp on m.from_perm_id = srp.permission_id; + +drop table temp_migration; diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190312164953__fix_permission_id_seq_value.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190312164953__fix_permission_id_seq_value.sql new file mode 100644 index 0000000000..af9a9722df --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190312164953__fix_permission_id_seq_value.sql @@ -0,0 +1,2 @@ +-- Updates sec_permission_id_seq to maximum identity + 1 +select setval('${ohdsiSchema}.sec_permission_id_seq', (select max(id) + 1 from ${ohdsiSchema}.sec_permission)); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190313161353__fix_permission_heracles.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190313161353__fix_permission_heracles.sql new file mode 100644 index 0000000000..e25f743021 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190313161353__fix_permission_heracles.sql @@ -0,0 +1,19 @@ +ALTER TABLE ${ohdsiSchema}.sec_permission ADD role_id_tmp 
INTEGER; + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description, role_id_tmp) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), REPLACE(new_perms.val, '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')), REPLACE(new_perms.descr, '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')), role_id + FROM ${ohdsiSchema}.sec_permission sp + JOIN ${ohdsiSchema}.sec_role_permission srp on sp.id = srp.permission_id + CROSS JOIN ( + SELECT 'cohortresults:%s:*:healthcareutilization:*:*:get' val, 'Get cohort results baseline on period for Source with SourceKey = %s' descr + UNION ALL + SELECT 'cohortresults:%s:*:healthcareutilization:*:*:*:get', 'Get cohort results baseline on occurrence for Source with SourceKey = %s' + ) new_perms +WHERE sp.value LIKE 'source:%:access'; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (id, role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sp.role_id_tmp, sp.id +FROM ${ohdsiSchema}.sec_permission sp +WHERE sp.role_id_tmp IS NOT NULL; + +ALTER TABLE ${ohdsiSchema}.sec_permission DROP COLUMN role_id_tmp; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190314171003__alter-ir-execution-status-as-string.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190314171003__alter-ir-execution-status-as-string.sql new file mode 100644 index 0000000000..497968ef0f --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190314171003__alter-ir-execution-status-as-string.sql @@ -0,0 +1,12 @@ +ALTER TABLE ${ohdsiSchema}.ir_execution RENAME status TO int_status; + +ALTER TABLE ${ohdsiSchema}.ir_execution ADD status VARCHAR(128); + +UPDATE ${ohdsiSchema}.ir_execution SET status = CASE int_status + WHEN -1 THEN 'ERROR' + WHEN 0 THEN 'PENDING' + WHEN 1 THEN 'RUNNING' + WHEN 2 THEN 'COMPLETE' + END; + +ALTER TABLE ${ohdsiSchema}.ir_execution DROP COLUMN int_status; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.0.20190328172203__added-ir-report-perms-for-sources.sql b/src/main/resources/db/migration/postgresql/V2.7.0.20190328172203__added-ir-report-perms-for-sources.sql new file mode 100644 index 0000000000..1e0d46608a --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.0.20190328172203__added-ir-report-perms-for-sources.sql @@ -0,0 +1,21 @@ +ALTER TABLE ${ohdsiSchema}.sec_permission ADD role_id_tmp INTEGER; + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description, role_id_tmp) + SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), REPLACE(CAST(new_perms.val AS VARCHAR), '%s', REPLACE(REPLACE(sp.value, 'source:', ''), ':access', '')), + REPLACE(CAST(new_perms.descr AS VARCHAR), '%s', REPLACE(REPLACE(sp.value, 'source:', ''), ':access', '')) description, srp.role_id + FROM ${ohdsiSchema}.sec_permission sp + JOIN ${ohdsiSchema}.sec_role_permission srp on sp.id = srp.permission_id + CROSS JOIN ( + SELECT 'ir:*:report:%s:get' val, 'Get IR generation report with SourceKey = %s' descr + ) new_perms + WHERE sp.value LIKE 'source:%:access' + AND NOT EXISTS(SELECT tsp.id FROM ${ohdsiSchema}.sec_permission tsp JOIN ${ohdsiSchema}.sec_role_permission tsrp ON tsrp.permission_id = tsp.id + AND tsp.value = REPLACE(CAST(new_perms.val AS VARCHAR), '%s', REPLACE(REPLACE(sp.value, 'source:', ''), ':access', ''))); + +INSERT INTO ${ohdsiSchema}.sec_role_permission (id, role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sp.role_id_tmp, sp.id +FROM 
${ohdsiSchema}.sec_permission sp +WHERE sp.role_id_tmp IS NOT NULL; + +ALTER TABLE ${ohdsiSchema}.sec_permission + DROP COLUMN role_id_tmp; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.1.20190405124500__split_output_files.sql b/src/main/resources/db/migration/postgresql/V2.7.1.20190405124500__split_output_files.sql new file mode 100644 index 0000000000..27cf784a33 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.1.20190405124500__split_output_files.sql @@ -0,0 +1,16 @@ +CREATE TABLE ${ohdsiSchema}.output_file_contents ( + output_file_id INTEGER, + file_contents BYTEA, + CONSTRAINT output_file_contents_pkey PRIMARY KEY (output_file_id) +); + +ALTER TABLE ${ohdsiSchema}.output_file_contents + ADD CONSTRAINT fk_ofc_of_id FOREIGN KEY (output_file_id) + REFERENCES ${ohdsiSchema}.output_files (id) + ON UPDATE NO ACTION ON DELETE CASCADE; + +INSERT INTO ${ohdsiSchema}.output_file_contents (output_file_id, file_contents) +SELECT id, file_contents +FROM ${ohdsiSchema}.output_files; + +ALTER TABLE ${ohdsiSchema}.output_files DROP COLUMN file_contents; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.2.20190429174506__run-as_permission.sql b/src/main/resources/db/migration/postgresql/V2.7.2.20190429174506__run-as_permission.sql new file mode 100644 index 0000000000..01cb75b2dc --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.2.20190429174506__run-as_permission.sql @@ -0,0 +1,7 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) + SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), 'user:runas:post', 'Sign in as another user'; + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id + FROM (SELECT id FROM ${ohdsiSchema}.sec_permission WHERE value = 'user:runas:post') sp + CROSS JOIN (SELECT id FROM ${ohdsiSchema}.sec_role WHERE name = 'admin' AND system_role = TRUE) sr; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.2.20190528153600__fix-ir-report-perms.sql b/src/main/resources/db/migration/postgresql/V2.7.2.20190528153600__fix-ir-report-perms.sql new file mode 100644 index 0000000000..a12ed17289 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.2.20190528153600__fix-ir-report-perms.sql @@ -0,0 +1,27 @@ +CREATE TEMP TABLE temp_migration ( + from_perm_id int, + new_value character varying(255) +); + +INSERT INTO temp_migration (from_perm_id, new_value) +SELECT sp.id as from_id, + REPLACE(CAST(new_perms.val AS VARCHAR(255)), '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')) as new_value +FROM ${ohdsiSchema}.sec_permission sp +CROSS JOIN ( + SELECT 'ir:%s:info:*:delete' val +) new_perms +WHERE sp.value LIKE 'source:%:access'; + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), new_value +FROM temp_migration; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (id,role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), + srp.role_id, + sp.id as permission_id +FROM temp_migration m +JOIN ${ohdsiSchema}.sec_permission sp on m.new_value = sp.value +JOIN ${ohdsiSchema}.sec_role_permission srp on m.from_perm_id = srp.permission_id; + +drop table temp_migration; diff --git a/src/main/resources/db/migration/postgresql/V2.7.4.20190905163100__cache-clear-permission.sql 
b/src/main/resources/db/migration/postgresql/V2.7.4.20190905163100__cache-clear-permission.sql new file mode 100644 index 0000000000..fcdef39a9d --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.4.20190905163100__cache-clear-permission.sql @@ -0,0 +1,7 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) + SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cache:clear:get', 'Clear middle-tier caches'; + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id + FROM (SELECT id FROM ${ohdsiSchema}.sec_permission WHERE value = 'cache:clear:get') sp + CROSS JOIN (SELECT id FROM ${ohdsiSchema}.sec_role WHERE name = 'admin' AND system_role = TRUE) sr; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.7.8.20200728164800__add_conceptset_permission.sql b/src/main/resources/db/migration/postgresql/V2.7.8.20200728164800__add_conceptset_permission.sql new file mode 100644 index 0000000000..801a41a85d --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.7.8.20200728164800__add_conceptset_permission.sql @@ -0,0 +1,11 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:*:expression:*:get', 'Resolve concept set expression with data source'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'conceptset:*:expression:*:get' +) +AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20190319114500__delete_design_column_from_views.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20190319114500__delete_design_column_from_views.sql new file mode 100644 index 0000000000..771040ff0e --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20190319114500__delete_design_column_from_views.sql @@ -0,0 +1,91 @@ +DROP VIEW ${ohdsiSchema}.cc_generation; +CREATE OR REPLACE VIEW ${ohdsiSchema}.cc_generation as ( +SELECT + -- Spring batch based + job.job_execution_id id, + job.create_time start_time, + job.end_time end_time, + job.status status, + job.exit_message exit_message, + CAST(cc_id_param.string_val AS INTEGER) cc_id, + CAST(source_param.string_val AS INTEGER) source_id, + -- Generation info based + gen_info.hash_code hash_code, + gen_info.created_by_id created_by_id +FROM ${ohdsiSchema}.batch_job_execution job + JOIN ${ohdsiSchema}.batch_job_execution_params cc_id_param + ON job.job_execution_id = cc_id_param.job_execution_id AND cc_id_param.key_name = 'cohort_characterization_id' + JOIN ${ohdsiSchema}.batch_job_execution_params source_param + ON job.job_execution_id = source_param.job_execution_id AND source_param.key_name = 'source_id' + LEFT JOIN ${ohdsiSchema}.analysis_generation_info gen_info + ON job.job_execution_id = gen_info.job_execution_id +ORDER BY start_time DESC +); + +DROP VIEW ${ohdsiSchema}.estimation_analysis_generation; +CREATE OR REPLACE VIEW ${ohdsiSchema}.estimation_analysis_generation as + SELECT + job.job_execution_id id, + job.create_time start_time, + job.end_time end_time, + job.status status, + job.exit_message exit_message, + CAST(estimation_id_param.string_val AS INTEGER) estimation_id, + CAST(source_param.string_val AS INTEGER) source_id, + passwd_param.string_val update_password, + -- 
Generation info based + gen_info.hash_code hash_code, + gen_info.created_by_id created_by_id, + -- Execution info based + exec_info.id analysis_execution_id + FROM ${ohdsiSchema}.batch_job_execution job + JOIN ${ohdsiSchema}.batch_job_execution_params estimation_id_param ON job.job_execution_id = estimation_id_param.job_execution_id AND estimation_id_param.key_name = 'estimation_analysis_id' + JOIN ${ohdsiSchema}.batch_job_execution_params source_param ON job.job_execution_id = source_param.job_execution_id AND source_param.key_name = 'source_id' + JOIN ${ohdsiSchema}.batch_job_execution_params passwd_param ON job.job_execution_id = passwd_param.job_execution_id AND passwd_param.key_name = 'update_password' + LEFT JOIN ${ohdsiSchema}.ee_analysis_status exec_info ON job.job_execution_id = exec_info.job_execution_id + LEFT JOIN ${ohdsiSchema}.analysis_generation_info gen_info ON job.job_execution_id = gen_info.job_execution_id; + +DROP VIEW ${ohdsiSchema}.pathway_analysis_generation; +CREATE OR REPLACE VIEW ${ohdsiSchema}.pathway_analysis_generation as + (SELECT + job.job_execution_id id, + job.create_time start_time, + job.end_time end_time, + job.status status, + job.exit_message exit_message, + CAST(pa_id_param.string_val AS INTEGER) pathway_analysis_id, + CAST(source_param.string_val AS INTEGER) source_id, + -- Generation info based + gen_info.hash_code hash_code, + gen_info.created_by_id created_by_id +FROM ${ohdsiSchema}.batch_job_execution job + JOIN ${ohdsiSchema}.batch_job_execution_params pa_id_param + ON job.job_execution_id = pa_id_param.job_execution_id AND pa_id_param.key_name = 'pathway_analysis_id' + JOIN ${ohdsiSchema}.batch_job_execution_params source_param + ON job.job_execution_id = source_param.job_execution_id AND source_param.key_name = 'source_id' + LEFT JOIN ${ohdsiSchema}.analysis_generation_info gen_info + ON job.job_execution_id = gen_info.job_execution_id +ORDER BY start_time DESC); + +DROP VIEW ${ohdsiSchema}.prediction_analysis_generation; +CREATE OR REPLACE VIEW ${ohdsiSchema}.prediction_analysis_generation as + SELECT + job.job_execution_id id, + job.create_time start_time, + job.end_time end_time, + job.status status, + job.exit_message exit_message, + CAST(plp_id_param.string_val AS INTEGER) prediction_id, + CAST(source_param.string_val AS INTEGER) source_id, + passwd_param.string_val update_password, + -- Generation info based + gen_info.hash_code hash_code, + gen_info.created_by_id created_by_id, + -- Execution info based + exec_info.id analysis_execution_id + FROM ${ohdsiSchema}.batch_job_execution job + JOIN ${ohdsiSchema}.batch_job_execution_params plp_id_param ON job.job_execution_id = plp_id_param.job_execution_id AND plp_id_param.key_name = 'prediction_analysis_id' + JOIN ${ohdsiSchema}.batch_job_execution_params source_param ON job.job_execution_id = source_param.job_execution_id AND source_param.key_name = 'source_id' + JOIN ${ohdsiSchema}.batch_job_execution_params passwd_param ON job.job_execution_id = passwd_param.job_execution_id AND passwd_param.key_name = 'update_password' + LEFT JOIN ${ohdsiSchema}.ee_analysis_status exec_info ON job.job_execution_id = exec_info.job_execution_id + LEFT JOIN ${ohdsiSchema}.analysis_generation_info gen_info ON job.job_execution_id = gen_info.job_execution_id; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20190326152000__fix-role-perms.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20190326152000__fix-role-perms.sql new file mode 100644 index 
0000000000..b6ed772467 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20190326152000__fix-role-perms.sql @@ -0,0 +1,3 @@ +UPDATE ${ohdsiSchema}.sec_permission +SET value = REPLACE(value, ':post', ':put') +WHERE value LIKE 'role:%:post'; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20190326180601__add-cc-download-permission.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20190326180601__add-cc-download-permission.sql new file mode 100644 index 0000000000..3dcaa6f0b3 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20190326180601__add-cc-download-permission.sql @@ -0,0 +1,9 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:download:get', 'Download Cohort Characterization package'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr + WHERE sp.value IN ( + 'cohort-characterization:*:download:get' + ) AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20190405140828__cc_generation_export_all.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20190405140828__cc_generation_export_all.sql new file mode 100644 index 0000000000..c57baec7f2 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20190405140828__cc_generation_export_all.sql @@ -0,0 +1,17 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:generation:*:result:export:post', 'Export all cohort characterization generation results'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:generation:*:result:count:get', 'Get total count of results for this generation'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp."value" IN ( + 'cohort-characterization:generation:*:result:export:post', + 'cohort-characterization:generation:*:result:count:get' +) +AND sr.name IN ('Atlas users'); + +UPDATE ${ohdsiSchema}.sec_permission +SET value = 'cohort-characterization:generation:*:result:post' +WHERE VALUE = 'cohort-characterization:generation:*:result:get' \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20190414180601__add-entity-exists-permission.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20190414180601__add-entity-exists-permission.sql new file mode 100644 index 0000000000..12609b735d --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20190414180601__add-entity-exists-permission.sql @@ -0,0 +1,39 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:*:exists:get', 'Check name uniqueness of concept set'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:*:exists:get', 'Check name uniqueness of cohort definition'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:*:exists:get', 'Check name uniqueness of pathway analysis'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:exists:get', 'Check name uniqueness of 
cohort characterization'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'feature-analysis:*:exists:get', 'Check name uniqueness of feature analysis'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'ir:*:exists:get', 'Check name uniqueness of incidence rate'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'prediction:*:exists:get', 'Check name uniqueness of prediction'), + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'estimation:*:exists:get', 'Check name uniqueness of estimation'); + + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value = 'conceptset:*:exists:get' +AND sr.name IN ('concept set creator', 'Moderator'); + + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value = 'cohortdefinition:*:exists:get' +AND sr.name IN ('cohort creator', 'Moderator'); + + + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'pathway-analysis:*:exists:get', + 'cohort-characterization:*:exists:get', + 'feature-analysis:*:exists:get', + 'ir:*:exists:get', + 'prediction:*:exists:get', + 'estimation:*:exists:get' +) +AND sr.name IN ('Atlas users', 'Moderator'); diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20190417120100__pathway-analysis-minSegmentLength.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20190417120100__pathway-analysis-minSegmentLength.sql new file mode 100644 index 0000000000..335540998c --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20190417120100__pathway-analysis-minSegmentLength.sql @@ -0,0 +1 @@ +ALTER TABLE ${ohdsiSchema}.pathway_analysis ADD COLUMN min_segment_length INTEGER; diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20190424140601__add-entity-exists-permission-cohort-concept.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20190424140601__add-entity-exists-permission-cohort-concept.sql new file mode 100644 index 0000000000..55deab6035 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20190424140601__add-entity-exists-permission-cohort-concept.sql @@ -0,0 +1,12 @@ +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value = 'conceptset:*:exists:get' +AND sr.name = 'Atlas users'; + + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value = 'cohortdefinition:*:exists:get' +AND sr.name = 'Atlas users'; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20190424150601__add-unique-name-constraint-to-entities.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20190424150601__add-unique-name-constraint-to-entities.sql new file mode 100644 index 0000000000..01ed236296 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20190424150601__add-unique-name-constraint-to-entities.sql @@ -0,0 +1,82 @@ +CREATE OR REPLACE FUNCTION ${ohdsiSchema}.rename_duplicate_names(name_title VARCHAR(20), id_title VARCHAR(15), + table_title VARCHAR(30), constraint_title VARCHAR(2)) RETURNS VOID + LANGUAGE 'plpgsql' +AS +$$ +DECLARE 
+ duplicate_names VARCHAR(400)[]; + name_repeats INT[]; + amount_of_duplicate_names INT; + amount_of_constraints INT; + constraint_name VARCHAR(100); + all_duplicates INT; + +BEGIN + EXECUTE format('SELECT COUNT(*) + FROM (SELECT %I + FROM %I.%I + GROUP BY %I + HAVING COUNT(*) > 1) as temp;', name_title, '${ohdsiSchema}', table_title, + name_title) INTO all_duplicates; + FOR k IN 0 .. coalesce(all_duplicates, 0) + LOOP + EXECUTE format('SELECT ARRAY(SELECT %I + FROM %I.%I + GROUP BY %I + HAVING COUNT(*) > 1)', name_title, '${ohdsiSchema}', table_title, + name_title) INTO duplicate_names; + + EXECUTE format('SELECT ARRAY(SELECT COUNT(*) + FROM %I.%I + GROUP BY %I + HAVING COUNT(*) > 1);', '${ohdsiSchema}', table_title, + name_title) INTO name_repeats; + + + amount_of_duplicate_names := (SELECT array_length(duplicate_names, 1)); + + FOR i IN 1 .. coalesce(amount_of_duplicate_names, 0) + LOOP + FOR j IN 1 .. coalesce(name_repeats[i], 0) + LOOP + EXECUTE format('UPDATE %I.%I + SET %I = concat(%I, '' ('', $1, '')'') + WHERE %I = (SELECT %I + FROM %I.%I + WHERE %I = $2 + ORDER BY %I + LIMIT 1);', '${ohdsiSchema}', table_title, name_title, name_title, id_title, + id_title, + '${ohdsiSchema}', table_title, + name_title, id_title) USING j, duplicate_names[i]; + END LOOP; + END LOOP; + END LOOP; + + constraint_name := concat('uq_', constraint_title, '_name'); + + EXECUTE format('SELECT COUNT(*) + FROM information_schema.table_constraints + WHERE constraint_schema = ''%I'' + AND constraint_name = ''%I'' + AND table_name = ''%I''', '${ohdsiSchema}', constraint_name, + table_title) INTO amount_of_constraints; + + IF amount_of_constraints = 0 THEN + EXECUTE format('ALTER TABLE %I.%I + ADD CONSTRAINT %I UNIQUE (%I);', '${ohdsiSchema}', table_title, constraint_name, + name_title); + END IF; +END; +$$; + +SELECT ${ohdsiSchema}.rename_duplicate_names('concept_set_name', 'concept_set_id', 'concept_set', 'cs'); +SELECT ${ohdsiSchema}.rename_duplicate_names('name', 'id', 'cohort_definition', 'cd'); +SELECT ${ohdsiSchema}.rename_duplicate_names('name', 'id', 'cohort_characterization', 'cc'); +SELECT ${ohdsiSchema}.rename_duplicate_names('name', 'id', 'fe_analysis', 'fe'); +SELECT ${ohdsiSchema}.rename_duplicate_names('name', 'id', 'pathway_analysis', 'pw'); +SELECT ${ohdsiSchema}.rename_duplicate_names('name', 'id', 'ir_analysis', 'ir'); +SELECT ${ohdsiSchema}.rename_duplicate_names('name', 'estimation_id', 'estimation', 'es'); +SELECT ${ohdsiSchema}.rename_duplicate_names('name', 'prediction_id', 'prediction', 'pd'); + +DROP FUNCTION ${ohdsiSchema}.rename_duplicate_names(name_title VARCHAR(20), id_title VARCHAR(15), table_title VARCHAR(30), constraint_title VARCHAR(2)); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20190527190601__add_cs_name_copy_permission.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20190527190601__add_cs_name_copy_permission.sql new file mode 100644 index 0000000000..00017d6659 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20190527190601__add_cs_name_copy_permission.sql @@ -0,0 +1,10 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES + (nextval('sec_permission_id_seq'), 'conceptset:*:copy-name:get', 'Get name for copying concept set'); + + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value = 'conceptset:*:copy-name:get' +AND sr.name IN ('concept set creator', 
'Moderator', 'Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20190531181956__alter_job-execution-params_string-val.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20190531181956__alter_job-execution-params_string-val.sql new file mode 100644 index 0000000000..5f44087c2c --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20190531181956__alter_job-execution-params_string-val.sql @@ -0,0 +1,119 @@ +DROP VIEW ${ohdsiSchema}.cc_generation; +DROP VIEW ${ohdsiSchema}.estimation_analysis_generation; +DROP VIEW ${ohdsiSchema}.pathway_analysis_generation; +DROP VIEW ${ohdsiSchema}.prediction_analysis_generation; +DROP VIEW ${ohdsiSchema}.user_import_job_history; + +ALTER TABLE ${ohdsiSchema}.user_import_job ADD user_roles VARCHAR; + +CREATE OR REPLACE VIEW ${ohdsiSchema}.cc_generation as ( + SELECT + -- Spring batch based + job.job_execution_id id, + job.create_time start_time, + job.end_time end_time, + job.status status, + job.exit_message exit_message, + CAST(cc_id_param.string_val AS INTEGER) cc_id, + CAST(source_param.string_val AS INTEGER) source_id, + -- Generation info based + gen_info.hash_code hash_code, + gen_info.created_by_id created_by_id + FROM ${ohdsiSchema}.batch_job_execution job + JOIN ${ohdsiSchema}.batch_job_execution_params cc_id_param + ON job.job_execution_id = cc_id_param.job_execution_id AND cc_id_param.key_name = 'cohort_characterization_id' + JOIN ${ohdsiSchema}.batch_job_execution_params source_param + ON job.job_execution_id = source_param.job_execution_id AND source_param.key_name = 'source_id' + LEFT JOIN ${ohdsiSchema}.analysis_generation_info gen_info + ON job.job_execution_id = gen_info.job_execution_id + ORDER BY start_time DESC +); + +CREATE OR REPLACE VIEW ${ohdsiSchema}.estimation_analysis_generation as + SELECT + job.job_execution_id id, + job.create_time start_time, + job.end_time end_time, + job.status status, + job.exit_message exit_message, + CAST(estimation_id_param.string_val AS INTEGER) estimation_id, + CAST(source_param.string_val AS INTEGER) source_id, + passwd_param.string_val update_password, + -- Generation info based + gen_info.hash_code hash_code, + gen_info.created_by_id created_by_id, + -- Execution info based + exec_info.id analysis_execution_id + FROM ${ohdsiSchema}.batch_job_execution job + JOIN ${ohdsiSchema}.batch_job_execution_params estimation_id_param ON job.job_execution_id = estimation_id_param.job_execution_id AND estimation_id_param.key_name = 'estimation_analysis_id' + JOIN ${ohdsiSchema}.batch_job_execution_params source_param ON job.job_execution_id = source_param.job_execution_id AND source_param.key_name = 'source_id' + JOIN ${ohdsiSchema}.batch_job_execution_params passwd_param ON job.job_execution_id = passwd_param.job_execution_id AND passwd_param.key_name = 'update_password' + LEFT JOIN ${ohdsiSchema}.ee_analysis_status exec_info ON job.job_execution_id = exec_info.job_execution_id + LEFT JOIN ${ohdsiSchema}.analysis_generation_info gen_info ON job.job_execution_id = gen_info.job_execution_id; + +CREATE OR REPLACE VIEW ${ohdsiSchema}.pathway_analysis_generation as + (SELECT + job.job_execution_id id, + job.create_time start_time, + job.end_time end_time, + job.status status, + job.exit_message exit_message, + CAST(pa_id_param.string_val AS INTEGER) pathway_analysis_id, + CAST(source_param.string_val AS INTEGER) source_id, + -- Generation info based + gen_info.hash_code hash_code, + gen_info.created_by_id created_by_id + FROM 
${ohdsiSchema}.batch_job_execution job + JOIN ${ohdsiSchema}.batch_job_execution_params pa_id_param + ON job.job_execution_id = pa_id_param.job_execution_id AND pa_id_param.key_name = 'pathway_analysis_id' + JOIN ${ohdsiSchema}.batch_job_execution_params source_param + ON job.job_execution_id = source_param.job_execution_id AND source_param.key_name = 'source_id' + LEFT JOIN ${ohdsiSchema}.analysis_generation_info gen_info + ON job.job_execution_id = gen_info.job_execution_id + ORDER BY start_time DESC); + +CREATE OR REPLACE VIEW ${ohdsiSchema}.prediction_analysis_generation as + SELECT + job.job_execution_id id, + job.create_time start_time, + job.end_time end_time, + job.status status, + job.exit_message exit_message, + CAST(plp_id_param.string_val AS INTEGER) prediction_id, + CAST(source_param.string_val AS INTEGER) source_id, + passwd_param.string_val update_password, + -- Generation info based + gen_info.hash_code hash_code, + gen_info.created_by_id created_by_id, + -- Execution info based + exec_info.id analysis_execution_id + FROM ${ohdsiSchema}.batch_job_execution job + JOIN ${ohdsiSchema}.batch_job_execution_params plp_id_param ON job.job_execution_id = plp_id_param.job_execution_id AND plp_id_param.key_name = 'prediction_analysis_id' + JOIN ${ohdsiSchema}.batch_job_execution_params source_param ON job.job_execution_id = source_param.job_execution_id AND source_param.key_name = 'source_id' + JOIN ${ohdsiSchema}.batch_job_execution_params passwd_param ON job.job_execution_id = passwd_param.job_execution_id AND passwd_param.key_name = 'update_password' + LEFT JOIN ${ohdsiSchema}.ee_analysis_status exec_info ON job.job_execution_id = exec_info.job_execution_id + LEFT JOIN ${ohdsiSchema}.analysis_generation_info gen_info ON job.job_execution_id = gen_info.job_execution_id; + +CREATE OR REPLACE VIEW ${ohdsiSchema}.user_import_job_history + AS + SELECT + job.job_execution_id as id, + job.start_time as start_time, + job.end_time as end_time, + job.status as status, + job.exit_code as exit_code, + job.exit_message as exit_message, + name_param.STRING_VAL as job_name, + author_param.STRING_VAL as author, + CAST(user_import_param.string_val AS INTEGER) user_import_id + FROM + ${ohdsiSchema}.BATCH_JOB_EXECUTION job + JOIN ${ohdsiSchema}.BATCH_JOB_INSTANCE instance ON instance.JOB_INSTANCE_ID = job.JOB_INSTANCE_ID + JOIN ${ohdsiSchema}.batch_job_execution_params name_param + ON job.job_execution_id = name_param.job_execution_id AND name_param.KEY_NAME = 'jobName' + JOIN ${ohdsiSchema}.batch_job_execution_params user_import_param + ON job.job_execution_id = user_import_param.job_execution_id AND user_import_param.key_name = 'user_import_id' + JOIN ${ohdsiSchema}.BATCH_JOB_EXECUTION_PARAMS author_param + ON job.JOB_EXECUTION_ID = author_param.JOB_EXECUTION_ID AND author_param.KEY_NAME = 'jobAuthor' + WHERE + instance.JOB_NAME = 'usersImport'; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20190604111801__ir_import_export_permission.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20190604111801__ir_import_export_permission.sql new file mode 100644 index 0000000000..ac4677e4cc --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20190604111801__ir_import_export_permission.sql @@ -0,0 +1,12 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'ir:design:post', 'Import Incidence Rates design'); +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, 
description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'ir:*:design:get', 'Export Incidence Rates design'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr + WHERE sp.value IN ( + 'ir:design:post', + 'ir:*:design:get' + ) AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20190728224300__ds-common-entity.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20190728224300__ds-common-entity.sql new file mode 100644 index 0000000000..2efad0f748 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20190728224300__ds-common-entity.sql @@ -0,0 +1,5 @@ +ALTER TABLE ${ohdsiSchema}.source + ADD COLUMN created_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id), + ADD COLUMN created_date DATE, + ADD COLUMN modified_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id), + ADD COLUMN modified_date DATE; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20190809215200__daimon-priority.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20190809215200__daimon-priority.sql new file mode 100644 index 0000000000..8f5033ea40 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20190809215200__daimon-priority.sql @@ -0,0 +1,9 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'source:daimon:priority:get', 'Get priority of Source Daimons'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr + WHERE sp.value IN ( + 'source:daimon:priority:get' + ) AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20190816173000__generation-cache.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20190816173000__generation-cache.sql new file mode 100644 index 0000000000..127fa657cc --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20190816173000__generation-cache.sql @@ -0,0 +1,18 @@ +CREATE SEQUENCE ${ohdsiSchema}.generation_cache_sequence; + +CREATE TABLE ${ohdsiSchema}.generation_cache ( + id INTEGER DEFAULT NEXTVAL('generation_cache_sequence'), + type VARCHAR NOT NULL, + design_hash VARCHAR NOT NULL, + source_id INTEGER NOT NULL, + result_identifier INTEGER NOT NULL, + result_checksum VARCHAR, -- can be null in case of empty result set + created_date DATE NOT NULL DEFAULT NOW(), + CONSTRAINT PK_generation_cache PRIMARY KEY (id), + CONSTRAINT FK_gc_source_id_source + FOREIGN KEY (source_id) + REFERENCES ${ohdsiSchema}.source (source_id) +); + +ALTER TABLE ${ohdsiSchema}.generation_cache ADD CONSTRAINT uq_gc_hash UNIQUE (type, design_hash, source_id); +ALTER TABLE ${ohdsiSchema}.generation_cache ADD CONSTRAINT uq_gc_result UNIQUE (type, source_id, result_identifier); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20191203200000__generation-cache-updates.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20191203200000__generation-cache-updates.sql new file mode 100644 index 0000000000..5a0e7202b2 --- /dev/null +++ 
b/src/main/resources/db/migration/postgresql/V2.8.0.20191203200000__generation-cache-updates.sql @@ -0,0 +1,5 @@ +ALTER TABLE ${ohdsiSchema}.generation_cache DROP CONSTRAINT uq_gc_result; +ALTER TABLE ${ohdsiSchema}.generation_cache DROP COLUMN result_identifier; +ALTER TABLE ${ohdsiSchema}.generation_cache DROP CONSTRAINT uq_gc_hash; +ALTER TABLE ${ohdsiSchema}.generation_cache ALTER COLUMN design_hash TYPE integer USING design_hash::integer; +ALTER TABLE ${ohdsiSchema}.generation_cache ADD CONSTRAINT uq_gc_hash UNIQUE (type, design_hash, source_id); diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20191219183702__migrate_feature_extraction_id.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20191219183702__migrate_feature_extraction_id.sql new file mode 100644 index 0000000000..3806895e2d --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20191219183702__migrate_feature_extraction_id.sql @@ -0,0 +1,2 @@ +INSERT INTO ${ohdsiSchema}.fe_analysis(type, name, domain, descr, value, design, is_locked, stat_type) + values ('PRESET', 'Hospital Frailty Risk Score', 'CONDITION', 'The Hospital Frailty Risk Score score using all conditions prior to the window end.', null, 'Hfrs', true, 'DISTRIBUTION'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20200109100200__cohort_sample_tables.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20200109100200__cohort_sample_tables.sql new file mode 100644 index 0000000000..ebedb5cb72 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20200109100200__cohort_sample_tables.sql @@ -0,0 +1,23 @@ +CREATE SEQUENCE ${ohdsiSchema}.cohort_sample_sequence; + +CREATE TABLE ${ohdsiSchema}.cohort_sample( + id INTEGER PRIMARY KEY NOT NULL, + name VARCHAR(255) NOT NULL, + cohort_definition_id INTEGER NOT NULL, + source_id INTEGER NOT NULL, + size INTEGER NOT NULL, + age_min SMALLINT NULL, + age_max SMALLINT NULL, + age_mode VARCHAR(24), + gender_concept_ids VARCHAR(255) NULL, + created_by_id INTEGER, + created_date TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT (now()), + modified_by_id INTEGER, + modified_date TIMESTAMP WITH TIME ZONE, + CONSTRAINT fk_cohort_sample_definition_id FOREIGN KEY (cohort_definition_id) + REFERENCES ${ohdsiSchema}.cohort_definition (id) ON DELETE CASCADE, + CONSTRAINT fk_cohort_sample_source_id FOREIGN KEY (source_id) + REFERENCES ${ohdsiSchema}.source (source_id) ON DELETE CASCADE +); + +CREATE INDEX idx_cohort_sample_source ON ${ohdsiSchema}.cohort_sample (cohort_definition_id, source_id); diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20200109132902__fe_domain_null_fix.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20200109132902__fe_domain_null_fix.sql new file mode 100644 index 0000000000..5fec3a4f71 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20200109132902__fe_domain_null_fix.sql @@ -0,0 +1,14 @@ +UPDATE ${ohdsiSchema}.fe_analysis +SET domain = 'MEASUREMENT' +WHERE type = 'PRESET' +AND design IN ( + 'MeasurementRangeGroupShortTerm', + 'MeasurementRangeGroupLongTerm', + 'MeasurementRangeGroupMediumTerm', + 'MeasurementRangeGroupAnyTimePrior', + 'MeasurementValueLongTerm', + 'MeasurementValueShortTerm', + 'MeasurementValueMediumTerm', + 'MeasurementValueAnyTimePrior' +) +; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20200122173000__insert_cohort_sample_permissions.sql 
b/src/main/resources/db/migration/postgresql/V2.8.0.20200122173000__insert_cohort_sample_permissions.sql new file mode 100644 index 0000000000..27a2bc586b --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20200122173000__insert_cohort_sample_permissions.sql @@ -0,0 +1,19 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES +(nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohortsample:*:*:get', 'List cohort samples'), +(nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohortsample:*:*:*:get', 'Get single cohort samples'), +(nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohortsample:*:*:*:delete', 'Delete cohort sample'), +(nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohortsample:*:*:delete', 'Delete all cohort samples of a cohort.'), +(nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohortsample:*:*:post', 'Create cohort sample'), +(nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohortsample:*:*:*:refresh:post', 'Refresh cohort sample'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'cohortsample:*:*:get', + 'cohortsample:*:*:*:get', + 'cohortsample:*:*:*:delete', + 'cohortsample:*:*:delete', + 'cohortsample:*:*:post', + 'cohortsample:*:*:*:refresh:post' + ) AND sr.name IN ('Atlas users'); diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20200127101702__restore_rest_endpoint_perms.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20200127101702__restore_rest_endpoint_perms.sql new file mode 100644 index 0000000000..b126f790dd --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20200127101702__restore_rest_endpoint_perms.sql @@ -0,0 +1,17 @@ +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), 'source:priorityVocabulary:get', 'Get source with highest priority vocabulary daimon' +; + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:generation:*:result:get', 'Get cohort characterization generation results - 2.7.x compatible' +; + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'source:priorityVocabulary:get', + 'cohort-characterization:generation:*:result:get' +) +AND sr.name IN ('Atlas users', 'Moderator') +; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20200130124345__fe_analysis_aggregate.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20200130124345__fe_analysis_aggregate.sql new file mode 100644 index 0000000000..e540bc0722 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20200130124345__fe_analysis_aggregate.sql @@ -0,0 +1,96 @@ +CREATE SEQUENCE ${ohdsiSchema}.fe_aggregate_sequence; + +CREATE TABLE ${ohdsiSchema}.fe_analysis_aggregate( + id INTEGER NOT NULL DEFAULT nextval('${ohdsiSchema}.fe_aggregate_sequence'), + name VARCHAR NOT NULL, + domain VARCHAR, + agg_function VARCHAR, + criteria_columns VARCHAR, + expression VARCHAR, + join_table VARCHAR, + join_type VARCHAR, + join_condition VARCHAR, + is_default BOOLEAN DEFAULT FALSE, + missing_means_zero BOOLEAN DEFAULT FALSE, + CONSTRAINT pk_fe_aggregate PRIMARY KEY(id) 
+); + +ALTER TABLE ${ohdsiSchema}.fe_analysis_criteria ADD fe_aggregate_id INTEGER; + +INSERT INTO ${ohdsiSchema}.fe_analysis_aggregate(name, domain, agg_function, criteria_columns, expression, join_table, join_type, join_condition, is_default, missing_means_zero) VALUES + ('Events count', null, 'COUNT', null, '*', null, null, null, TRUE, TRUE), + ('Distinct start dates', null, 'COUNT', 'START_DATE', 'DISTINCT v.start_date', null, null, null, FALSE, TRUE), + ('Duration', null, null, 'DURATION', 'duration', null, null, null, FALSE, FALSE), + ('Duration (max)', null, 'MAX', 'DURATION', 'duration', null, null, null, FALSE, FALSE), + ('Duration (min)', null, 'MIN', 'DURATION', 'duration', null, null, null, FALSE, FALSE), + ('Duration (average)', null, 'AVG', 'DURATION', 'duration', null, null, null, FALSE, FALSE), + ('Value as number', 'MEASUREMENT', null, 'VALUE_AS_NUMBER', 'value_as_number', null, null, null, FALSE, FALSE), + ('Value as number (max)', 'MEASUREMENT', 'MAX', 'VALUE_AS_NUMBER', 'value_as_number', null, null, null, FALSE, FALSE), + ('Value as number (min)', 'MEASUREMENT', 'MIN', 'VALUE_AS_NUMBER', 'value_as_number', null, null, null, FALSE, FALSE), + ('Value as number (average)', 'MEASUREMENT', 'AVG', 'VALUE_AS_NUMBER', 'value_as_number', null, null, null, FALSE, FALSE), + ('Range high', 'MEASUREMENT', null, 'RANGE_HIGH', 'range_high', null, null, null, FALSE, FALSE), + ('Range high (max)', 'MEASUREMENT', 'MAX', 'RANGE_HIGH', 'range_high', null, null, null, FALSE, FALSE), + ('Range high (min)', 'MEASUREMENT', 'MIN', 'RANGE_HIGH', 'range_high', null, null, null, FALSE, FALSE), + ('Range high (average)', 'MEASUREMENT', 'AVG', 'RANGE_HIGH', 'range_high', null, null, null, FALSE, FALSE), + ('Range low', 'MEASUREMENT', null, 'RANGE_LOW', 'range_low', null, null, null, FALSE, FALSE), + ('Range low (max)', 'MEASUREMENT', 'MAX', 'RANGE_LOW', 'range_low', null, null, null, FALSE, FALSE), + ('Range low (min)', 'MEASUREMENT', 'MIN', 'RANGE_LOW', 'range_low', null, null, null, FALSE, FALSE), + ('Range low (average)', 'MEASUREMENT', 'AVG', 'RANGE_LOW', 'range_low', null, null, null, FALSE, FALSE), + ('Refills', 'DRUG', null, 'REFILLS', 'refills', null, null, null, FALSE, FALSE), + ('Refills (max)', 'DRUG', 'MAX', 'REFILLS', 'refills', null, null, null, FALSE, FALSE), + ('Refills (min)', 'DRUG', 'MIN', 'REFILLS', 'refills', null, null, null, FALSE, FALSE), + ('Refills (average)', 'DRUG', 'AVG', 'REFILLS', 'refills', null, null, null, FALSE, FALSE), + ('Quantity', 'DRUG', null, 'QUANTITY', 'quantity', null, null, null, FALSE, FALSE), + ('Quantity (max)', 'DRUG', 'MAX', 'QUANTITY', 'quantity', null, null, null, FALSE, FALSE), + ('Quantity (min)', 'DRUG', 'MIN', 'QUANTITY', 'quantity', null, null, null, FALSE, FALSE), + ('Quantity (average)', 'DRUG', 'AVG', 'QUANTITY', 'quantity', null, null, null, FALSE, FALSE), + ('Days supply', 'DRUG', null, 'DAYS_SUPPLY', 'days_supply', null, null, null, FALSE, FALSE), + ('Days supply (max)', 'DRUG', 'MAX', 'DAYS_SUPPLY', 'days_supply', null, null, null, FALSE, FALSE), + ('Days supply (min)', 'DRUG', 'MIN', 'DAYS_SUPPLY', 'days_supply', null, null, null, FALSE, FALSE), + ('Days supply (average)', 'DRUG', 'AVG', 'DAYS_SUPPLY', 'days_supply', null, null, null, FALSE, FALSE), + ('Drug exposure count', 'DRUG_ERA', null, 'ERA_OCCURRENCES', 'era_occurrences', null, null, null, FALSE, FALSE), + ('Drug exposure count (max)', 'DRUG_ERA', 'MAX', 'ERA_OCCURRENCES', 'era_occurrences', null, null, null, FALSE, FALSE), + ('Drug exposure count (min)', 'DRUG_ERA', 'MIN', 
'ERA_OCCURRENCES', 'era_occurrences', null, null, null, FALSE, FALSE), + ('Drug exposure count (average)', 'DRUG_ERA', 'AVG', 'ERA_OCCURRENCES', 'era_occurrences', null, null, null, FALSE, FALSE), + ('Gap days', 'DRUG_ERA', null, 'GAP_DAYS', 'gap_days', null, null, null, FALSE, FALSE), + ('Gap days (max)', 'DRUG_ERA', 'MAX', 'GAP_DAYS', 'gap_days', null, null, null, FALSE, FALSE), + ('Gap days (min)', 'DRUG_ERA', 'MIN', 'GAP_DAYS', 'gap_days', null, null, null, FALSE, FALSE), + ('Gap days (average)', 'DRUG_ERA', 'AVG', 'GAP_DAYS', 'gap_days', null, null, null, FALSE, FALSE), + ('Condition occurrence count', 'CONDITION_ERA', null, 'ERA_OCCURRENCES', 'era_occurrences', null, null, null, FALSE, FALSE), + ('Condition occurrence count (max)', 'CONDITION_ERA', 'MAX', 'ERA_OCCURRENCES', 'era_occurrences', null, null, null, FALSE, FALSE), + ('Condition occurrence count (min)', 'CONDITION_ERA', 'MIN', 'ERA_OCCURRENCES', 'era_occurrences', null, null, null, FALSE, FALSE), + ('Condition occurrence count (average)', 'CONDITION_ERA', 'AVG', 'ERA_OCCURRENCES', 'era_occurrences', null, null, null, FALSE, FALSE), + ('Value as number', 'OBSERVATION', null, 'VALUE_AS_NUMBER', 'value_as_number', null, null, null, FALSE, FALSE), + ('Value as number (max)', 'OBSERVATION', 'MAX', 'VALUE_AS_NUMBER', 'value_as_number', null, null, null, FALSE, FALSE), + ('Value as number (min)', 'OBSERVATION', 'MIN', 'VALUE_AS_NUMBER', 'value_as_number', null, null, null, FALSE, FALSE), + ('Value as number (average)', 'OBSERVATION', 'AVG', 'VALUE_AS_NUMBER', 'value_as_number', null, null, null, FALSE, FALSE), + ('Quantity', 'PROCEDURE', null, 'QUANTITY', 'quantity', null, null, null, FALSE, FALSE), + ('Quantity (max)', 'PROCEDURE', 'MAX', 'QUANTITY', 'quantity', null, null, null, FALSE, FALSE), + ('Quantity (min)', 'PROCEDURE', 'MIN', 'QUANTITY', 'quantity', null, null, null, FALSE, FALSE), + ('Quantity (average)', 'PROCEDURE', 'AVG', 'QUANTITY', 'quantity', null, null, null, FALSE, FALSE); + +UPDATE + ${ohdsiSchema}.fe_analysis_criteria +SET + fe_aggregate_id = ag.id +FROM + ${ohdsiSchema}.fe_analysis_criteria feac JOIN + ${ohdsiSchema}.fe_analysis fea ON fea.id = feac.fe_analysis_id, + ${ohdsiSchema}.fe_analysis_aggregate ag +WHERE + ag.name = 'Events count' + AND fea.type = 'CRITERIA_SET' + AND fea.stat_type = 'DISTRIBUTION'; + +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) + SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), 'feature-analysis:aggregates:get', 'List available aggregates for Feature Analyses'; + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr + WHERE sp.value IN ( + 'feature-analysis:aggregates:get' + ) AND sr.name IN ('Atlas users'); + +ALTER TABLE ${ohdsiSchema}.fe_analysis_criteria ADD CONSTRAINT fk_criteria_aggregate + FOREIGN KEY (fe_aggregate_id) REFERENCES fe_analysis_aggregate(id); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20200325145111__check_required_params.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20200325145111__check_required_params.sql new file mode 100644 index 0000000000..b450475897 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20200325145111__check_required_params.sql @@ -0,0 +1,24 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 
'cohort-characterization:check:post', 'Run diagnostics for cohort characterization params'); +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:check:post', 'Run diagnostics for pathway params'); +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'ir:check:post', 'Run diagnostics for incident rates'); +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'prediction:check:post', 'Run diagnostics for prediction'); +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'estimation:check:post', 'Run diagnostics for estimation'); +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:check:post', 'Run diagnostics for cohort definition'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr + WHERE sp.value IN ( + 'cohort-characterization:check:post', + 'pathway-analysis:check:post', + 'ir:check:post', + 'prediction:check:post', + 'estimation:check:post', + 'cohortdefinition:check:post' + ) AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20200409133802__add-ir-permission-to-atlas-user.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20200409133802__add-ir-permission-to-atlas-user.sql new file mode 100644 index 0000000000..16de849a6f --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20200409133802__add-ir-permission-to-atlas-user.sql @@ -0,0 +1,4 @@ +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value ='ir:*:put' AND sr.name = 'Atlas users'; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20200413150815__gis_service_api.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20200413150815__gis_service_api.sql new file mode 100644 index 0000000000..154ad31a19 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20200413150815__gis_service_api.sql @@ -0,0 +1,18 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'gis:cohort:*:bounds:*:get', 'Get gis bounds for source'); +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'gis:cohort:*:clusters:*:get', 'Get gis clusters for source'); +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'gis:cohort:*:density:*:get', 'Get gis density for source'); +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'gis:person:*:bounds:*:get', 'Get bounds for person'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, 
${ohdsiSchema}.sec_role sr + WHERE sp.value IN ( + 'gis:cohort:*:bounds:*:get', + 'gis:cohort:*:clusters:*:get', + 'gis:cohort:*:density:*:get', + 'gis:person:*:bounds:*:get' + ) AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20200427161830__modify_user_login.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20200427161830__modify_user_login.sql new file mode 100644 index 0000000000..a67a56902f --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20200427161830__modify_user_login.sql @@ -0,0 +1,5 @@ +ALTER TABLE ${ohdsiSchema}.sec_user DROP CONSTRAINT sec_user_login_unique; + +ALTER TABLE ${ohdsiSchema}.sec_user ALTER COLUMN login SET DATA TYPE VARCHAR(1024); + +ALTER TABLE ${ohdsiSchema}.sec_user ADD CONSTRAINT sec_user_login_unique UNIQUE (login); diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20200428122109__add_created_by_to_cohort_generation_info.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20200428122109__add_created_by_to_cohort_generation_info.sql new file mode 100644 index 0000000000..2e1645bcf6 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20200428122109__add_created_by_to_cohort_generation_info.sql @@ -0,0 +1,2 @@ +ALTER TABLE ${ohdsiSchema}.cohort_generation_info + ADD COLUMN created_by_id INTEGER REFERENCES ${ohdsiSchema}.sec_user(id); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20200616112935__fe_check_permissions.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20200616112935__fe_check_permissions.sql new file mode 100644 index 0000000000..8ff6356e80 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20200616112935__fe_check_permissions.sql @@ -0,0 +1,9 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'feature-analysis:*:copy:get', 'Copy the specified feature analysis'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr + WHERE sp.value IN ( + 'feature-analysis:*:copy:get' + ) AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20200724121114__daimon-priority-public.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20200724121114__daimon-priority-public.sql new file mode 100644 index 0000000000..b23994c04c --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20200724121114__daimon-priority-public.sql @@ -0,0 +1,6 @@ +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr + WHERE sp.value IN ( + 'source:daimon:priority:get' + ) AND sr.name IN ('public'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20200811155100__remove-ir-put-permission-from-atlas-user.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20200811155100__remove-ir-put-permission-from-atlas-user.sql new file mode 100644 index 0000000000..0a1a25136e --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20200811155100__remove-ir-put-permission-from-atlas-user.sql @@ -0,0 +1,4 @@ +DELETE FROM 
${ohdsiSchema}.sec_role_permission srp WHERE + srp.permission_id in (SELECT id FROM ${ohdsiSchema}.sec_permission sp WHERE sp.value = 'ir:*:put') + AND + srp.role_id in (SELECT id FROM ${ohdsiSchema}.sec_role sr WHERE sr.name = 'Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20200903120903__drop-cohort-features-columns.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20200903120903__drop-cohort-features-columns.sql new file mode 100644 index 0000000000..0afe8f9fe6 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20200903120903__drop-cohort-features-columns.sql @@ -0,0 +1,37 @@ +alter table ${ohdsiSchema}.cohort_generation_info drop column include_features; + +delete +from + ${ohdsiSchema}.sec_role_permission srp +where + id in ( + select + srp.id + from + ${ohdsiSchema}.sec_role_permission srp + join ${ohdsiSchema}.sec_permission sp on + sp.id = srp.permission_id + where + sp.value like 'featureextraction:query:prevalence:*:%:get' + or sp.value like 'featureextraction:query:distributions:*:%:get' + or sp.value like 'featureextraction:explore:prevalence:*:%:*:get' + or sp.value like 'featureextraction:generatesql:%:*:get' + or sp.value like 'featureextraction:generate:%:*:get'); + +delete +from + ${ohdsiSchema}.sec_permission sp +where + sp.value like 'featureextraction:query:prevalence:*:%:get' + or sp.value like 'featureextraction:query:distributions:*:%:get' + or sp.value like 'featureextraction:explore:prevalence:*:%:*:get' + or sp.value like 'featureextraction:generatesql:%:*:get' + or sp.value like 'featureextraction:generate:%:*:get'; + +drop table ${ohdsiSchema}.cohort_features_dist; + +drop table ${ohdsiSchema}.cohort_features; + +drop table ${ohdsiSchema}.cohort_features_ref; + +drop table ${ohdsiSchema}.cohort_features_analysis_ref; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.202010130001__print_friendly_security.sql b/src/main/resources/db/migration/postgresql/V2.8.0.202010130001__print_friendly_security.sql new file mode 100644 index 0000000000..5d1d9a40ac --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.202010130001__print_friendly_security.sql @@ -0,0 +1,13 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:printfriendly:cohort:post', 'Get print-friendly HTML of cohort expression'); +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:printfriendly:conceptsets:post', 'Get print-friendly HTML of conceptset list'); + + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr + WHERE sp.value IN ( + 'cohortdefinition:printfriendly:cohort:post', + 'cohortdefinition:printfriendly:conceptsets:post' + ) AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20201022120031__concept_ancestor_and_descendants.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20201022120031__concept_ancestor_and_descendants.sql new file mode 100644 index 0000000000..67d7724833 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20201022120031__concept_ancestor_and_descendants.sql @@ -0,0 +1,24 @@ +CREATE TEMP TABLE 
temp_migration ( + from_perm_id int, + new_value character varying(255) +); + +INSERT INTO temp_migration (from_perm_id, new_value) +SELECT sp.id as from_id, + REPLACE('vocabulary:%s:concept:*:ancestorAndDescendant:get', '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')) as new_value +FROM ${ohdsiSchema}.sec_permission sp +WHERE sp.value LIKE 'source:%:access'; + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), new_value +FROM temp_migration; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (id,role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), + srp.role_id, + sp.id as permission_id +FROM temp_migration m +JOIN ${ohdsiSchema}.sec_permission sp on m.new_value = sp.value +JOIN ${ohdsiSchema}.sec_role_permission srp on m.from_perm_id = srp.permission_id; + +DROP TABLE temp_migration; diff --git a/src/main/resources/db/migration/postgresql/V2.8.0.20201103171300__add_source_daimon_unique_constraint.sql b/src/main/resources/db/migration/postgresql/V2.8.0.20201103171300__add_source_daimon_unique_constraint.sql new file mode 100644 index 0000000000..3f0ccb1a30 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.0.20201103171300__add_source_daimon_unique_constraint.sql @@ -0,0 +1,9 @@ +delete from ${ohdsiSchema}.source_daimon sd1 +where sd1.priority = -1 + and sd1.source_daimon_id < ( + select max(source_daimon_id) + from ${ohdsiSchema}.source_daimon sd2 + where sd1.source_id = sd2.source_id + and sd1.daimon_type = sd2.daimon_type); + +ALTER TABLE ${ohdsiSchema}.source_daimon ADD CONSTRAINT un_source_daimon UNIQUE (source_id,daimon_type); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.1.20210203163300__gis_service_add_check_source_permission.sql b/src/main/resources/db/migration/postgresql/V2.8.1.20210203163300__gis_service_add_check_source_permission.sql new file mode 100644 index 0000000000..5401cb4723 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.1.20210203163300__gis_service_add_check_source_permission.sql @@ -0,0 +1,8 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) VALUES + (nextval('${ohdsiSchema}.sec_permission_id_seq'), 'gis:source:check:*:get', 'Check availability of geodata in source'); + + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) + SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr + WHERE sp.value = 'gis:source:check:*:get' AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.1.20210219100459__evidence_get_permission.sql b/src/main/resources/db/migration/postgresql/V2.8.1.20210219100459__evidence_get_permission.sql new file mode 100644 index 0000000000..00cf5d7cd1 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.1.20210219100459__evidence_get_permission.sql @@ -0,0 +1,14 @@ +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) + SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'),'evidence:*:negativecontrols:*:get','Get evidence information'; + INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) + SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'),'evidence:*:druglabel:post','Get drug label information'; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT sr.id, sp.id + FROM 
${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr + WHERE sp."value" in + ( + 'evidence:*:negativecontrols:*:get', + 'evidence:*:druglabel:post' + ) + AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.8.1.20210226100460__evidence_get_permission.sql b/src/main/resources/db/migration/postgresql/V2.8.1.20210226100460__evidence_get_permission.sql new file mode 100644 index 0000000000..b6e46ba9d8 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.8.1.20210226100460__evidence_get_permission.sql @@ -0,0 +1,11 @@ +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) + SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'),'evidence:*:drugconditionpairs:post','Get drug condition pairs information'; + +INSERT INTO ${ohdsiSchema}.sec_role_permission (role_id, permission_id) + SELECT sr.id, sp.id + FROM ${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr + WHERE sp."value" in + ( + 'evidence:*:drugconditionpairs:post' + ) + AND sr.name IN ('Atlas users'); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.9.0.20210423125133__assets_tags.sql b/src/main/resources/db/migration/postgresql/V2.9.0.20210423125133__assets_tags.sql new file mode 100644 index 0000000000..16e580b589 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.9.0.20210423125133__assets_tags.sql @@ -0,0 +1,189 @@ +INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description) +VALUES (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'tag:get', 'List tags'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'tag:search:get', 'Search tags by name'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'tag:post', 'Create tag'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'tag:*:put', 'Update tag'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'tag:*:get', 'Get tag'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'tag:*:delete', 'Delete tag'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:*:tag:post', + 'Assign tag to cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:*:tag:*:delete', + 'Unassign tag from cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:*:protectedtag:post', + 'Assign tag to cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:*:protectedtag:*:delete', + 'Unassign tag from cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:*:tag:post', + 'Assign tag to cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:*:tag:*:delete', + 'Unassign tag from cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:*:protectedtag:post', + 'Assign tag to cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:*:protectedtag:*:delete', + 'Unassign tag from cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:check:post', + 'Unassign tag from cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:tag:post', + 'Assign tag to cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:tag:*:delete', + 'Unassign tag from cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:protectedtag:post', + 'Assign tag to cohort definition'), + 
(NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:protectedtag:*:delete', + 'Unassign tag from cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'ir:*:tag:post', + 'Assign tag to cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'ir:*:tag:*:delete', + 'Unassign tag from cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'ir:*:protectedtag:post', + 'Assign tag to cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'ir:*:protectedtag:*:delete', + 'Unassign tag from cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:*:tag:post', + 'Assign tag to cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:*:tag:*:delete', + 'Unassign tag from cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:*:protectedtag:post', + 'Assign tag to cohort definition'), + (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:*:protectedtag:*:delete', + 'Unassign tag from cohort definition'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, + ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'tag:get', + 'tag:search:get', + 'tag:post', + 'tag:*:put', + 'tag:*:get', + 'tag:*:delete', + 'cohortdefinition:*:tag:post', + 'cohortdefinition:*:tag:*:delete', + 'conceptset:*:tag:post', + 'conceptset:*:tag:*:delete', + 'conceptset:check:post', + 'cohort-characterization:*:tag:post', + 'cohort-characterization:*:tag:*:delete', + 'ir:*:tag:post', + 'ir:*:tag:*:delete', + 'pathway-analysis:*:tag:post', + 'pathway-analysis:*:tag:*:delete') + AND sr.name IN ('Atlas users'); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id) +SELECT sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, + ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'cohortdefinition:*:protectedtag:post', + 'cohortdefinition:*:protectedtag:*:delete', + 'conceptset:*:protectedtag:post', + 'conceptset:*:protectedtag:*:delete', + 'cohort-characterization:*:protectedtag:post', + 'cohort-characterization:*:protectedtag:*:delete', + 'ir:*:protectedtag:post', + 'ir:*:protectedtag:*:delete', + 'pathway-analysis:*:protectedtag:post', + 'pathway-analysis:*:protectedtag:*:delete') + AND sr.name IN ('admin'); + +CREATE SEQUENCE ${ohdsiSchema}.tags_seq; + +-- Possible types are: +-- 0 - System (predefined) tags +-- 1 - Custom tags +-- 2 - Prizm tags +CREATE TABLE ${ohdsiSchema}.tags +( + id int4 NOT NULL DEFAULT nextval('${ohdsiSchema}.tags_seq'), + name VARCHAR NOT NULL, + type int4 NOT NULL DEFAULT 0, + count int4 NOT NULL DEFAULT 0, + show_group bool NOT NULL DEFAULT FALSE, + icon varchar NULL, + color varchar NULL, + multi_selection bool NOT NULL DEFAULT FALSE, + permission_protected bool NOT NULL DEFAULT FALSE, + mandatory bool NOT NULL DEFAULT FALSE, + allow_custom bool NOT NULL DEFAULT FALSE, + description varchar NULL, + created_by_id INTEGER, + created_date TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT (now()), + modified_by_id INTEGER, + modified_date TIMESTAMP WITH TIME ZONE, + CONSTRAINT pk_tags_id PRIMARY KEY (id), + CONSTRAINT fk_tags_sec_user_creator FOREIGN KEY (created_by_id) REFERENCES ${ohdsiSchema}.sec_user (id), + CONSTRAINT fk_tags_sec_user_updater FOREIGN KEY (modified_by_id) REFERENCES ${ohdsiSchema}.sec_user (id) +); + +CREATE UNIQUE INDEX tags_name_idx ON ${ohdsiSchema}.tags USING btree (LOWER(name)); + 
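+-- Illustrative example only (not part of this migration): given the defaults and the
+-- type encoding noted above (0 = system, 1 = custom, 2 = Prizm), a custom tag could be
+-- created with just a name, e.g.:
+--   INSERT INTO ${ohdsiSchema}.tags (name, type) VALUES ('needs-review', 1);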
+CREATE TABLE ${ohdsiSchema}.tag_groups
+(
+    tag_id int4 NOT NULL,
+    group_id int4 NOT NULL,
+    CONSTRAINT tag_groups_group_fk FOREIGN KEY (group_id) REFERENCES ${ohdsiSchema}.tags (id) ON DELETE CASCADE,
+    CONSTRAINT tag_groups_tag_fk FOREIGN KEY (tag_id) REFERENCES ${ohdsiSchema}.tags (id) ON DELETE CASCADE
+);
+
+CREATE TABLE ${ohdsiSchema}.concept_set_tags
+(
+    asset_id int4 NOT NULL,
+    tag_id int4 NOT NULL,
+    CONSTRAINT pk_concept_set_tags_id PRIMARY KEY (asset_id, tag_id),
+    CONSTRAINT concept_set_tags_fk_sets FOREIGN KEY (asset_id) REFERENCES ${ohdsiSchema}.concept_set (concept_set_id) ON DELETE CASCADE,
+    CONSTRAINT concept_set_tags_fk_tags FOREIGN KEY (tag_id) REFERENCES ${ohdsiSchema}.tags (id) ON DELETE CASCADE
+);
+
+CREATE INDEX concept_set_tags_concept_id_idx ON ${ohdsiSchema}.concept_set_tags USING btree (asset_id);
+CREATE INDEX concept_set_tags_tag_id_idx ON ${ohdsiSchema}.concept_set_tags USING btree (tag_id);
+
+CREATE TABLE ${ohdsiSchema}.cohort_tags
+(
+    asset_id int4 NOT NULL,
+    tag_id int4 NOT NULL,
+    CONSTRAINT pk_cohort_tags_id PRIMARY KEY (asset_id, tag_id),
+    CONSTRAINT cohort_tags_fk_definitions FOREIGN KEY (asset_id) REFERENCES ${ohdsiSchema}.cohort_definition (id) ON DELETE CASCADE,
+    CONSTRAINT cohort_tags_fk_tags FOREIGN KEY (tag_id) REFERENCES ${ohdsiSchema}.tags (id) ON DELETE CASCADE
+);
+
+CREATE INDEX cohort_tags_cohort_id_idx ON ${ohdsiSchema}.cohort_tags USING btree (asset_id);
+CREATE INDEX cohort_tags_tag_id_idx ON ${ohdsiSchema}.cohort_tags USING btree (tag_id);
+
+CREATE TABLE ${ohdsiSchema}.cohort_characterization_tags
+(
+    asset_id int4 NOT NULL,
+    tag_id int4 NOT NULL,
+    CONSTRAINT pk_cc_tags_id PRIMARY KEY (asset_id, tag_id),
+    CONSTRAINT cc_tags_fk_ccs FOREIGN KEY (asset_id) REFERENCES ${ohdsiSchema}.cohort_characterization (id) ON DELETE CASCADE,
+    CONSTRAINT cc_tags_fk_tags FOREIGN KEY (tag_id) REFERENCES ${ohdsiSchema}.tags (id) ON DELETE CASCADE
+);
+
+CREATE INDEX cc_tags_cc_id_idx ON ${ohdsiSchema}.cohort_characterization_tags USING btree (asset_id);
+CREATE INDEX cc_tags_tag_id_idx ON ${ohdsiSchema}.cohort_characterization_tags USING btree (tag_id);
+
+CREATE TABLE ${ohdsiSchema}.ir_tags
+(
+    asset_id int4 NOT NULL,
+    tag_id int4 NOT NULL,
+    CONSTRAINT pk_ir_tags_id PRIMARY KEY (asset_id, tag_id),
+    CONSTRAINT ir_tags_fk_irs FOREIGN KEY (asset_id) REFERENCES ${ohdsiSchema}.ir_analysis (id) ON DELETE CASCADE,
+    CONSTRAINT ir_tags_fk_tags FOREIGN KEY (tag_id) REFERENCES ${ohdsiSchema}.tags (id) ON DELETE CASCADE
+);
+
+CREATE INDEX ir_tags_ir_id_idx ON ${ohdsiSchema}.ir_tags USING btree (asset_id);
+CREATE INDEX ir_tags_tag_id_idx ON ${ohdsiSchema}.ir_tags USING btree (tag_id);
+
+CREATE TABLE ${ohdsiSchema}.pathway_tags
+(
+    asset_id int4 NOT NULL,
+    tag_id int4 NOT NULL,
+    CONSTRAINT pk_pathway_tags_id PRIMARY KEY (asset_id, tag_id),
+    CONSTRAINT pathway_tags_fk_pathways FOREIGN KEY (asset_id) REFERENCES ${ohdsiSchema}.pathway_analysis (id) ON DELETE CASCADE,
+    CONSTRAINT pathway_tags_fk_tags FOREIGN KEY (tag_id) REFERENCES ${ohdsiSchema}.tags (id) ON DELETE CASCADE
+);
+
+CREATE INDEX pathway_tags_pathway_id_idx ON ${ohdsiSchema}.pathway_tags USING btree (asset_id);
+CREATE INDEX pathway_tags_tag_id_idx ON ${ohdsiSchema}.pathway_tags USING btree (tag_id);
\ No newline at end of file
diff --git a/src/main/resources/db/migration/postgresql/V2.9.0.20210513111520__versioning.sql b/src/main/resources/db/migration/postgresql/V2.9.0.20210513111520__versioning.sql
new file mode 100644
index 0000000000..f55927ce53
--- /dev/null
+++ b/src/main/resources/db/migration/postgresql/V2.9.0.20210513111520__versioning.sql
@@ -0,0 +1,176 @@
+INSERT INTO ${ohdsiSchema}.sec_permission(id, value, description)
+VALUES (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:*:version:get',
+        'Get list of cohort versions'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:*:version:*:get',
+        'Get cohort version'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:*:version:*:put',
+        'Update cohort version info'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:*:version:*:delete',
+        'Delete cohort version info'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohortdefinition:*:version:*:createAsset:put',
+        'Copy cohort version as new cohort'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:*:version:get',
+        'Get list of concept set versions'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:*:version:*:get',
+        'Get concept set version'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:*:version:*:put',
+        'Update concept set version info'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:*:version:*:delete',
+        'Delete concept set version info'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:*:version:*:createAsset:put',
+        'Copy concept set version as new concept set'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:*:version:*:expression:get',
+        'Get expression for concept set items for default source'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:*:version:*:expression:*:get',
+        'Get expression for concept set items for certain source'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:version:get',
+        'Get list of characterization versions'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:version:*:get',
+        'Get characterization version'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:version:*:put',
+        'Update characterization version info'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:version:*:delete',
+        'Delete characterization version info'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'cohort-characterization:*:version:*:createAsset:put',
+        'Copy characterization version as new characterization'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'ir:*:version:get',
+        'Get list of incidence rate analysis versions'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'ir:*:version:*:get',
+        'Get incidence rate analysis version'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'ir:*:version:*:put',
+        'Update incidence rate analysis version info'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'ir:*:version:*:delete',
+        'Delete incidence rate analysis version info'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'ir:*:version:*:createAsset:put',
+        'Copy incidence rate analysis version as new incidence rate analysis'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:*:version:get',
+        'Get list of pathway analysis versions'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:*:version:*:get',
+        'Get pathway analysis version'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:*:version:*:put',
+        'Update pathway analysis version info'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:*:version:*:delete',
+        'Delete pathway analysis version info'),
+       (NEXTVAL('${ohdsiSchema}.sec_permission_id_seq'), 'pathway-analysis:*:version:*:createAsset:put',
+        'Copy pathway analysis version as new pathway analysis');
+
+INSERT INTO ${ohdsiSchema}.sec_role_permission(role_id, permission_id)
+SELECT sr.id, sp.id
+FROM ${ohdsiSchema}.sec_permission SP,
+     ${ohdsiSchema}.sec_role sr
+WHERE sp.value IN (
+    'cohortdefinition:*:version:get',
+    'cohortdefinition:*:version:*:get',
+    'cohortdefinition:*:version:*:put',
+    'cohortdefinition:*:version:*:delete',
+    'cohortdefinition:*:version:*:createAsset:put',
+    'conceptset:*:version:get',
+    'conceptset:*:version:*:get',
+    'conceptset:*:version:*:put',
+    'conceptset:*:version:*:delete',
+    'conceptset:*:version:*:createAsset:put',
+    'conceptset:*:version:*:expression:get',
+    'conceptset:*:version:*:expression:*:get',
+    'cohort-characterization:*:version:get',
+    'cohort-characterization:*:version:*:get',
+    'cohort-characterization:*:version:*:put',
+    'cohort-characterization:*:version:*:delete',
+    'cohort-characterization:*:version:*:createAsset:put',
+    'ir:*:version:get',
+    'ir:*:version:*:get',
+    'ir:*:version:*:put',
+    'ir:*:version:*:delete',
+    'ir:*:version:*:createAsset:put',
+    'pathway-analysis:*:version:get',
+    'pathway-analysis:*:version:*:get',
+    'pathway-analysis:*:version:*:put',
+    'pathway-analysis:*:version:*:delete',
+    'pathway-analysis:*:version:*:createAsset:put')
+  AND sr.name IN ('Atlas users');
+
+-- Cohorts
+CREATE TABLE ${ohdsiSchema}.cohort_version
+(
+    asset_id int8 NOT NULL,
+    comment varchar NULL,
+    description varchar NULL,
+    version int4 NOT NULL DEFAULT 1,
+    asset_json varchar NOT NULL,
+    archived bool NOT NULL DEFAULT FALSE,
+    created_by_id INTEGER,
+    created_date TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT (now()),
+    CONSTRAINT pk_cohort_version_id PRIMARY KEY (asset_id, version),
+    CONSTRAINT fk_cohort_version_sec_user_creator FOREIGN KEY (created_by_id) REFERENCES ${ohdsiSchema}.sec_user (id),
+    CONSTRAINT fk_cohort_version_asset_id FOREIGN KEY (asset_id) REFERENCES ${ohdsiSchema}.cohort_definition (id) ON DELETE CASCADE
+);
+
+CREATE INDEX cohort_version_asset_idx ON ${ohdsiSchema}.cohort_version USING btree (asset_id);
+
+-- Cohort characterizations
+CREATE TABLE ${ohdsiSchema}.cohort_characterization_version
+(
+    asset_id int8 NOT NULL,
+    comment varchar NULL,
+    version int4 NOT NULL DEFAULT 1,
+    asset_json varchar NOT NULL,
+    archived bool NOT NULL DEFAULT FALSE,
+    created_by_id INTEGER,
+    created_date TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT (now()),
+    CONSTRAINT pk_cc_version_id PRIMARY KEY (asset_id, version),
+    CONSTRAINT fk_cc_version_sec_user_creator FOREIGN KEY (created_by_id) REFERENCES ${ohdsiSchema}.sec_user (id),
+    CONSTRAINT fk_cc_version_asset_id FOREIGN KEY (asset_id) REFERENCES ${ohdsiSchema}.cohort_characterization (id) ON DELETE CASCADE
+);
+
+CREATE INDEX cc_version_asset_idx ON ${ohdsiSchema}.cohort_characterization_version USING btree (asset_id);
+
+-- Concept sets
+CREATE TABLE ${ohdsiSchema}.concept_set_version
+(
+    asset_id int8 NOT NULL,
+    comment varchar NULL,
+    version int4 NOT NULL DEFAULT 1,
+    asset_json varchar NOT NULL,
+    archived bool NOT NULL DEFAULT FALSE,
+    created_by_id INTEGER,
+    created_date TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT (now()),
+    CONSTRAINT pk_concept_set_version_id PRIMARY KEY (asset_id, version),
+    CONSTRAINT fk_concept_set_version_sec_user_creator FOREIGN KEY (created_by_id) REFERENCES ${ohdsiSchema}.sec_user (id),
+    CONSTRAINT fk_concept_set_version_asset_id FOREIGN KEY (asset_id) REFERENCES ${ohdsiSchema}.concept_set (concept_set_id) ON DELETE CASCADE
+);
+
+CREATE INDEX concept_set_version_asset_idx ON ${ohdsiSchema}.concept_set_version USING btree (asset_id);
+
+-- Incidence rates
+CREATE TABLE ${ohdsiSchema}.ir_version
+(
+    asset_id int8 NOT NULL,
+    comment varchar NULL,
+    description varchar NULL,
+    version int4 NOT NULL DEFAULT 1,
+    asset_json varchar NOT NULL,
+    archived bool NOT NULL DEFAULT FALSE,
+    created_by_id INTEGER,
+    created_date TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT (now()),
+    CONSTRAINT pk_ir_version_id PRIMARY KEY (asset_id, version),
+    CONSTRAINT fk_ir_version_sec_user_creator FOREIGN KEY (created_by_id) REFERENCES ${ohdsiSchema}.sec_user (id),
+    CONSTRAINT fk_ir_version_asset_id FOREIGN KEY (asset_id) REFERENCES ${ohdsiSchema}.ir_analysis (id) ON DELETE CASCADE
+);
+
+CREATE INDEX ir_version_asset_idx ON ${ohdsiSchema}.ir_version USING btree (asset_id);
+
+-- Pathways
+CREATE TABLE ${ohdsiSchema}.pathway_version
+(
+    asset_id int8 NOT NULL,
+    comment varchar NULL,
+    version int4 NOT NULL DEFAULT 1,
+    asset_json varchar NOT NULL,
+    archived bool NOT NULL DEFAULT FALSE,
+    created_by_id INTEGER,
+    created_date TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT (now()),
+    CONSTRAINT pk_pathway_version_id PRIMARY KEY (asset_id, version),
+    CONSTRAINT fk_pathway_version_sec_user_creator FOREIGN KEY (created_by_id) REFERENCES ${ohdsiSchema}.sec_user (id),
+    CONSTRAINT fk_pathway_version_asset_id FOREIGN KEY (asset_id) REFERENCES ${ohdsiSchema}.pathway_analysis (id) ON DELETE CASCADE
+);
+
+CREATE INDEX pathway_version_asset_idx ON ${ohdsiSchema}.pathway_version USING btree (asset_id);
diff --git a/src/main/resources/db/migration/postgresql/V2.9.0.20210727101117__achilles_cache.sql b/src/main/resources/db/migration/postgresql/V2.9.0.20210727101117__achilles_cache.sql
new file mode 100644
index 0000000000..1915c254b3
--- /dev/null
+++ b/src/main/resources/db/migration/postgresql/V2.9.0.20210727101117__achilles_cache.sql
@@ -0,0 +1,13 @@
+CREATE SEQUENCE ${ohdsiSchema}.achilles_cache_seq START WITH 1;
+
+CREATE TABLE ${ohdsiSchema}.achilles_cache
+(
+    id bigint NOT NULL DEFAULT nextval('${ohdsiSchema}.achilles_cache_seq'),
+    source_id int4 NOT NULL,
+    cache_name varchar NOT NULL,
+    cache text,
+    CONSTRAINT achilles_cache_pk PRIMARY KEY (id),
+    CONSTRAINT achilles_cache_fk FOREIGN KEY (source_id) REFERENCES ${ohdsiSchema}."source" (source_id) ON DELETE CASCADE
+);
+
+CREATE UNIQUE INDEX achilles_cache_source_id_idx ON ${ohdsiSchema}.achilles_cache USING btree (source_id, cache_name);
diff --git a/src/main/resources/db/migration/postgresql/V2.9.0.20210812164224__assets_tags_renaming.sql b/src/main/resources/db/migration/postgresql/V2.9.0.20210812164224__assets_tags_renaming.sql
new file mode 100644
index 0000000000..1aaf3f77cc
--- /dev/null
+++ b/src/main/resources/db/migration/postgresql/V2.9.0.20210812164224__assets_tags_renaming.sql
@@ -0,0 +1,9 @@
+ALTER SEQUENCE ${ohdsiSchema}.tags_seq RENAME TO tag_seq;
+
+ALTER TABLE ${ohdsiSchema}.tags RENAME TO tag;
+ALTER TABLE ${ohdsiSchema}.tag_groups RENAME TO tag_group;
+ALTER TABLE ${ohdsiSchema}.concept_set_tags RENAME TO concept_set_tag;
+ALTER TABLE ${ohdsiSchema}.cohort_tags RENAME TO cohort_tag;
+ALTER TABLE ${ohdsiSchema}.cohort_characterization_tags RENAME TO cohort_characterization_tag;
+ALTER TABLE ${ohdsiSchema}.ir_tags RENAME TO ir_tag;
+ALTER TABLE ${ohdsiSchema}.pathway_tags RENAME TO pathway_tag;

From a493548da4c3b32708b54c8d1eaeec4c4bedace2 Mon Sep 17 00:00:00 2001
From: Peter Hoffmann <954078+p-hoffmann@users.noreply.github.com>
Date: Tue, 6 Jan 2026 19:46:37 +0800
Subject: [PATCH 11/11] update

---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index 2a955e8bc1..89e52605ec 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1262,7 +1262,7 @@
             com.github.p-hoffmann
             trexsql-ext
-            v0.1.15
+            v0.1.18